Reverting HADOOP-10485 from branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1596041 13f79535-47bb-0310-9956-ffa450edef68
Jason Darrell Lowe 2014-05-19 20:13:14 +00:00
parent 9962a38eb8
commit fb22c7c52f
45 changed files with 7547 additions and 2 deletions

View File: CHANGES.txt

@@ -23,8 +23,6 @@ Release 2.5.0 - UNRELEASED
HADOOP-10104. Update jackson to 1.9.13 (Akira Ajisaka via stevel)
HADOOP-10485. Remove dead classes in hadoop-streaming. (wheat9)
HADOOP-10503. Move junit up to v 4.11. (cnauroth)
HADOOP-10535. Make the retry numbers in ActiveStandbyElector configurable.

View File: dev-support/findbugsExcludeFile.xml

@@ -20,6 +20,7 @@
<Match>
<Or>
<Package name="org.apache.hadoop.streaming" />
<Class name="org.apache.hadoop.record.compiler.JFile" />
</Or>
<Bug pattern="NP_ALWAYS_NULL"/>
</Match>
@@ -40,6 +41,10 @@
<Bug pattern="EI_EXPOSE_REP"/>
</Match>
<Match>
<Package name="org.apache.hadoop.record.compiler.generated" />
</Match>
<Match>
<Package name="org.apache.hadoop.record" />
<Or>
@@ -48,4 +53,19 @@
<Bug pattern="MS_PKGPROTECT" />
</Or>
</Match>
<Match>
<Class name="org.apache.hadoop.record.meta.Utils" />
<Method name="skip" />
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
<!--
The compareTo method is actually a dummy method that just
throws exceptions. So, no need to override equals. Ignore
-->
<Match>
<Class name="org.apache.hadoop.record.meta.RecordTypeInfo" />
<Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
</Match>
</FindBugsFilter>

View File: pom.xml

@@ -41,6 +41,11 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>

View File: org/apache/hadoop/record/CsvRecordInput.java

@@ -0,0 +1,223 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CsvRecordInput implements RecordInput {
private PushbackReader stream;
private class CsvIndex implements Index {
@Override
public boolean done() {
char c = '\0';
try {
c = (char) stream.read();
stream.unread(c);
} catch (IOException ex) {
}
return (c == '}') ? true : false;
}
@Override
public void incr() {}
}
private void throwExceptionOnError(String tag) throws IOException {
throw new IOException("Error deserializing "+tag);
}
private String readField(String tag) throws IOException {
try {
StringBuilder buf = new StringBuilder();
while (true) {
char c = (char) stream.read();
switch (c) {
case ',':
return buf.toString();
case '}':
case '\n':
case '\r':
stream.unread(c);
return buf.toString();
default:
buf.append(c);
}
}
} catch (IOException ex) {
throw new IOException("Error reading "+tag);
}
}
/** Creates a new instance of CsvRecordInput */
public CsvRecordInput(InputStream in) {
try {
stream = new PushbackReader(new InputStreamReader(in, "UTF-8"));
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException(ex);
}
}
@Override
public byte readByte(String tag) throws IOException {
return (byte) readLong(tag);
}
@Override
public boolean readBool(String tag) throws IOException {
String sval = readField(tag);
return "T".equals(sval) ? true : false;
}
@Override
public int readInt(String tag) throws IOException {
return (int) readLong(tag);
}
@Override
public long readLong(String tag) throws IOException {
String sval = readField(tag);
try {
long lval = Long.parseLong(sval);
return lval;
} catch (NumberFormatException ex) {
throw new IOException("Error deserializing "+tag);
}
}
@Override
public float readFloat(String tag) throws IOException {
return (float) readDouble(tag);
}
@Override
public double readDouble(String tag) throws IOException {
String sval = readField(tag);
try {
double dval = Double.parseDouble(sval);
return dval;
} catch (NumberFormatException ex) {
throw new IOException("Error deserializing "+tag);
}
}
@Override
public String readString(String tag) throws IOException {
String sval = readField(tag);
return Utils.fromCSVString(sval);
}
@Override
public Buffer readBuffer(String tag) throws IOException {
String sval = readField(tag);
return Utils.fromCSVBuffer(sval);
}
@Override
public void startRecord(String tag) throws IOException {
if (tag != null && !tag.isEmpty()) {
char c1 = (char) stream.read();
char c2 = (char) stream.read();
if (c1 != 's' || c2 != '{') {
throw new IOException("Error deserializing "+tag);
}
}
}
@Override
public void endRecord(String tag) throws IOException {
char c = (char) stream.read();
if (tag == null || tag.isEmpty()) {
if (c != '\n' && c != '\r') {
throw new IOException("Error deserializing record.");
} else {
return;
}
}
if (c != '}') {
throw new IOException("Error deserializing "+tag);
}
c = (char) stream.read();
if (c != ',') {
stream.unread(c);
}
return;
}
@Override
public Index startVector(String tag) throws IOException {
char c1 = (char) stream.read();
char c2 = (char) stream.read();
if (c1 != 'v' || c2 != '{') {
throw new IOException("Error deserializing "+tag);
}
return new CsvIndex();
}
@Override
public void endVector(String tag) throws IOException {
char c = (char) stream.read();
if (c != '}') {
throw new IOException("Error deserializing "+tag);
}
c = (char) stream.read();
if (c != ',') {
stream.unread(c);
}
return;
}
@Override
public Index startMap(String tag) throws IOException {
char c1 = (char) stream.read();
char c2 = (char) stream.read();
if (c1 != 'm' || c2 != '{') {
throw new IOException("Error deserializing "+tag);
}
return new CsvIndex();
}
@Override
public void endMap(String tag) throws IOException {
char c = (char) stream.read();
if (c != '}') {
throw new IOException("Error deserializing "+tag);
}
c = (char) stream.read();
if (c != ',') {
stream.unread(c);
}
return;
}
}
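
For orientation, a minimal round-trip sketch pairing this deserializer with the companion CsvRecordOutput serializer from the same package (not shown in this excerpt; its write* methods mirror the read* methods above). The wire format is exactly what startRecord/endRecord parse: records as s{...}, fields separated by commas.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.record.CsvRecordInput;
import org.apache.hadoop.record.CsvRecordOutput;

public class CsvRoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize two fields as one CSV record; yields "s{42,'hello}".
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    CsvRecordOutput out = new CsvRecordOutput(bytes);
    out.startRecord(null, "rec");      // non-empty tag => emits the "s{" header
    out.writeInt(42, "count");
    out.writeString("hello", "msg");   // strings are CSV-escaped ('hello)
    out.endRecord(null, "rec");        // emits the closing "}"

    // Deserialize in the same field order; tags only label error messages.
    CsvRecordInput in = new CsvRecordInput(
        new ByteArrayInputStream(bytes.toByteArray()));
    in.startRecord("rec");
    int count = in.readInt("count");
    String msg = in.readString("msg");
    in.endRecord("rec");
    System.out.println(count + " " + msg);   // prints: 42 hello
  }
}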

View File: org/apache/hadoop/record/XmlRecordInput.java

@@ -0,0 +1,271 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record;
import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.xml.sax.*;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
/**
* XML Deserializer.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class XmlRecordInput implements RecordInput {
static private class Value {
private String type;
private StringBuffer sb;
public Value(String t) {
type = t;
sb = new StringBuffer();
}
public void addChars(char[] buf, int offset, int len) {
sb.append(buf, offset, len);
}
public String getValue() { return sb.toString(); }
public String getType() { return type; }
}
private static class XMLParser extends DefaultHandler {
private boolean charsValid = false;
private ArrayList<Value> valList;
private XMLParser(ArrayList<Value> vlist) {
valList = vlist;
}
@Override
public void startDocument() throws SAXException {}
@Override
public void endDocument() throws SAXException {}
@Override
public void startElement(String ns,
String sname,
String qname,
Attributes attrs) throws SAXException {
charsValid = false;
if ("boolean".equals(qname) ||
"i4".equals(qname) ||
"int".equals(qname) ||
"string".equals(qname) ||
"double".equals(qname) ||
"ex:i1".equals(qname) ||
"ex:i8".equals(qname) ||
"ex:float".equals(qname)) {
charsValid = true;
valList.add(new Value(qname));
} else if ("struct".equals(qname) ||
"array".equals(qname)) {
valList.add(new Value(qname));
}
}
@Override
public void endElement(String ns,
String sname,
String qname) throws SAXException {
charsValid = false;
if ("struct".equals(qname) ||
"array".equals(qname)) {
valList.add(new Value("/"+qname));
}
}
@Override
public void characters(char buf[], int offset, int len)
throws SAXException {
if (charsValid) {
Value v = valList.get(valList.size()-1);
v.addChars(buf, offset, len);
}
}
}
private class XmlIndex implements Index {
@Override
public boolean done() {
Value v = valList.get(vIdx);
if ("/array".equals(v.getType())) {
valList.set(vIdx, null);
vIdx++;
return true;
} else {
return false;
}
}
@Override
public void incr() {}
}
private ArrayList<Value> valList;
private int vLen;
private int vIdx;
private Value next() throws IOException {
if (vIdx < vLen) {
Value v = valList.get(vIdx);
valList.set(vIdx, null);
vIdx++;
return v;
} else {
throw new IOException("Error in deserialization.");
}
}
/** Creates a new instance of XmlRecordInput */
public XmlRecordInput(InputStream in) {
try{
valList = new ArrayList<Value>();
DefaultHandler handler = new XMLParser(valList);
SAXParserFactory factory = SAXParserFactory.newInstance();
SAXParser parser = factory.newSAXParser();
parser.parse(in, handler);
vLen = valList.size();
vIdx = 0;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
@Override
public byte readByte(String tag) throws IOException {
Value v = next();
if (!"ex:i1".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Byte.parseByte(v.getValue());
}
@Override
public boolean readBool(String tag) throws IOException {
Value v = next();
if (!"boolean".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return "1".equals(v.getValue());
}
@Override
public int readInt(String tag) throws IOException {
Value v = next();
if (!"i4".equals(v.getType()) &&
!"int".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Integer.parseInt(v.getValue());
}
@Override
public long readLong(String tag) throws IOException {
Value v = next();
if (!"ex:i8".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Long.parseLong(v.getValue());
}
@Override
public float readFloat(String tag) throws IOException {
Value v = next();
if (!"ex:float".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Float.parseFloat(v.getValue());
}
@Override
public double readDouble(String tag) throws IOException {
Value v = next();
if (!"double".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Double.parseDouble(v.getValue());
}
@Override
public String readString(String tag) throws IOException {
Value v = next();
if (!"string".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Utils.fromXMLString(v.getValue());
}
@Override
public Buffer readBuffer(String tag) throws IOException {
Value v = next();
if (!"string".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return Utils.fromXMLBuffer(v.getValue());
}
@Override
public void startRecord(String tag) throws IOException {
Value v = next();
if (!"struct".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
}
@Override
public void endRecord(String tag) throws IOException {
Value v = next();
if (!"/struct".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
}
@Override
public Index startVector(String tag) throws IOException {
Value v = next();
if (!"array".equals(v.getType())) {
throw new IOException("Error deserializing "+tag+".");
}
return new XmlIndex();
}
@Override
public void endVector(String tag) throws IOException {}
@Override
public Index startMap(String tag) throws IOException {
return startVector(tag);
}
@Override
public void endMap(String tag) throws IOException { endVector(tag); }
}

View File: org/apache/hadoop/record/XmlRecordOutput.java

@@ -0,0 +1,270 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record;
import java.io.IOException;
import java.util.TreeMap;
import java.util.ArrayList;
import java.io.PrintStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.Stack;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* XML Serializer.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class XmlRecordOutput implements RecordOutput {
private PrintStream stream;
private int indent = 0;
private Stack<String> compoundStack;
private void putIndent() {
StringBuilder sb = new StringBuilder("");
for (int idx = 0; idx < indent; idx++) {
sb.append(" ");
}
stream.print(sb.toString());
}
private void addIndent() {
indent++;
}
private void closeIndent() {
indent--;
}
private void printBeginEnvelope(String tag) {
if (!compoundStack.empty()) {
String s = compoundStack.peek();
if ("struct".equals(s)) {
putIndent();
stream.print("<member>\n");
addIndent();
putIndent();
stream.print("<name>"+tag+"</name>\n");
putIndent();
stream.print("<value>");
} else if ("vector".equals(s)) {
stream.print("<value>");
} else if ("map".equals(s)) {
stream.print("<value>");
}
} else {
stream.print("<value>");
}
}
private void printEndEnvelope(String tag) {
if (!compoundStack.empty()) {
String s = compoundStack.peek();
if ("struct".equals(s)) {
stream.print("</value>\n");
closeIndent();
putIndent();
stream.print("</member>\n");
} else if ("vector".equals(s)) {
stream.print("</value>\n");
} else if ("map".equals(s)) {
stream.print("</value>\n");
}
} else {
stream.print("</value>\n");
}
}
private void insideVector(String tag) {
printBeginEnvelope(tag);
compoundStack.push("vector");
}
private void outsideVector(String tag) throws IOException {
String s = compoundStack.pop();
if (!"vector".equals(s)) {
throw new IOException("Error serializing vector.");
}
printEndEnvelope(tag);
}
private void insideMap(String tag) {
printBeginEnvelope(tag);
compoundStack.push("map");
}
private void outsideMap(String tag) throws IOException {
String s = compoundStack.pop();
if (!"map".equals(s)) {
throw new IOException("Error serializing map.");
}
printEndEnvelope(tag);
}
private void insideRecord(String tag) {
printBeginEnvelope(tag);
compoundStack.push("struct");
}
private void outsideRecord(String tag) throws IOException {
String s = compoundStack.pop();
if (!"struct".equals(s)) {
throw new IOException("Error serializing record.");
}
printEndEnvelope(tag);
}
/** Creates a new instance of XmlRecordOutput */
public XmlRecordOutput(OutputStream out) {
try {
stream = new PrintStream(out, true, "UTF-8");
compoundStack = new Stack<String>();
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException(ex);
}
}
@Override
public void writeByte(byte b, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<ex:i1>");
stream.print(Byte.toString(b));
stream.print("</ex:i1>");
printEndEnvelope(tag);
}
@Override
public void writeBool(boolean b, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<boolean>");
stream.print(b ? "1" : "0");
stream.print("</boolean>");
printEndEnvelope(tag);
}
@Override
public void writeInt(int i, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<i4>");
stream.print(Integer.toString(i));
stream.print("</i4>");
printEndEnvelope(tag);
}
@Override
public void writeLong(long l, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<ex:i8>");
stream.print(Long.toString(l));
stream.print("</ex:i8>");
printEndEnvelope(tag);
}
@Override
public void writeFloat(float f, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<ex:float>");
stream.print(Float.toString(f));
stream.print("</ex:float>");
printEndEnvelope(tag);
}
@Override
public void writeDouble(double d, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<double>");
stream.print(Double.toString(d));
stream.print("</double>");
printEndEnvelope(tag);
}
@Override
public void writeString(String s, String tag) throws IOException {
printBeginEnvelope(tag);
stream.print("<string>");
stream.print(Utils.toXMLString(s));
stream.print("</string>");
printEndEnvelope(tag);
}
@Override
public void writeBuffer(Buffer buf, String tag)
throws IOException {
printBeginEnvelope(tag);
stream.print("<string>");
stream.print(Utils.toXMLBuffer(buf));
stream.print("</string>");
printEndEnvelope(tag);
}
@Override
public void startRecord(Record r, String tag) throws IOException {
insideRecord(tag);
stream.print("<struct>\n");
addIndent();
}
@Override
public void endRecord(Record r, String tag) throws IOException {
closeIndent();
putIndent();
stream.print("</struct>");
outsideRecord(tag);
}
@Override
public void startVector(ArrayList v, String tag) throws IOException {
insideVector(tag);
stream.print("<array>\n");
addIndent();
}
@Override
public void endVector(ArrayList v, String tag) throws IOException {
closeIndent();
putIndent();
stream.print("</array>");
outsideVector(tag);
}
@Override
public void startMap(TreeMap v, String tag) throws IOException {
insideMap(tag);
stream.print("<array>\n");
addIndent();
}
@Override
public void endMap(TreeMap v, String tag) throws IOException {
closeIndent();
putIndent();
stream.print("</array>");
outsideMap(tag);
}
}
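
A matching round-trip sketch for the XML side, pairing this serializer with the XmlRecordInput deserializer above. The element names the two classes agree on are visible in both sources: a <struct> envelope, <i4> for int, <ex:i8> for long, <string> for strings (the undeclared ex: prefix is tolerated because the default SAX parser is not namespace-aware).

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.record.XmlRecordInput;
import org.apache.hadoop.record.XmlRecordOutput;

public class XmlRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    XmlRecordOutput out = new XmlRecordOutput(bytes);
    out.startRecord(null, "rec");      // <value><struct> envelope
    out.writeInt(42, "count");         // <i4>42</i4>
    out.writeLong(7L, "id");           // <ex:i8>7</ex:i8>
    out.writeString("hello", "msg");   // <string>hello</string>
    out.endRecord(null, "rec");

    XmlRecordInput in = new XmlRecordInput(
        new ByteArrayInputStream(bytes.toByteArray()));
    in.startRecord("rec");             // expects a "struct" value
    int count = in.readInt("count");
    long id = in.readLong("id");
    String msg = in.readString("msg");
    in.endRecord("rec");               // expects the matching "/struct"
    System.out.println(count + " " + id + " " + msg);
  }
}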

View File: org/apache/hadoop/record/compiler/CGenerator.java

@@ -0,0 +1,72 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.ArrayList;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Iterator;
/**
* C Code generator front-end for Hadoop record I/O.
*/
class CGenerator extends CodeGenerator {
CGenerator() {
}
/**
* Generate C code. This method only creates the requested file(s)
* and spits out file-level elements (such as include statements, etc.);
* record-level code is generated by JRecord.
*/
@Override
void genCode(String name, ArrayList<JFile> ilist,
ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
throws IOException {
name = new File(destDir, (new File(name)).getName()).getAbsolutePath();
FileWriter cc = new FileWriter(name+".c");
try {
FileWriter hh = new FileWriter(name+".h");
try {
hh.write("#ifndef __"+name.toUpperCase().replace('.','_')+"__\n");
hh.write("#define __"+name.toUpperCase().replace('.','_')+"__\n");
hh.write("#include \"recordio.h\"\n");
for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
hh.write("#include \""+iter.next().getName()+".h\"\n");
}
cc.write("#include \""+name+".h\"\n");
/*
for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
iter.next().genCppCode(hh, cc);
}
*/
hh.write("#endif //"+name.toUpperCase().replace('.','_')+"__\n");
} finally {
hh.close();
}
} finally {
cc.close();
}
}
}

View File: org/apache/hadoop/record/compiler/CodeBuffer.java

@@ -0,0 +1,105 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.ArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A wrapper around StringBuffer that automatically does indentation
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CodeBuffer {
static private ArrayList<Character> startMarkers = new ArrayList<Character>();
static private ArrayList<Character> endMarkers = new ArrayList<Character>();
static {
addMarkers('{', '}');
addMarkers('(', ')');
}
static void addMarkers(char ch1, char ch2) {
startMarkers.add(ch1);
endMarkers.add(ch2);
}
private int level = 0;
private int numSpaces = 2;
private boolean firstChar = true;
private StringBuffer sb;
/** Creates a new instance of CodeBuffer */
CodeBuffer() {
this(2, "");
}
CodeBuffer(String s) {
this(2, s);
}
CodeBuffer(int numSpaces, String s) {
sb = new StringBuffer();
this.numSpaces = numSpaces;
this.append(s);
}
void append(String s) {
int length = s.length();
for (int idx = 0; idx < length; idx++) {
char ch = s.charAt(idx);
append(ch);
}
}
void append(char ch) {
if (endMarkers.contains(ch)) {
level--;
}
if (firstChar) {
for (int idx = 0; idx < level; idx++) {
for (int num = 0; num < numSpaces; num++) {
rawAppend(' ');
}
}
}
rawAppend(ch);
firstChar = false;
if (startMarkers.contains(ch)) {
level++;
}
if (ch == '\n') {
firstChar = true;
}
}
private void rawAppend(char ch) {
sb.append(ch);
}
@Override
public String toString() {
return sb.toString();
}
}
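
A small sketch of the indentation behavior, assuming it sits in the same org.apache.hadoop.record.compiler package (CodeBuffer and its append methods are package-private): braces and parentheses adjust the level, and each line is indented when its first character is written.

package org.apache.hadoop.record.compiler;

public class CodeBufferDemo {
  public static void main(String[] args) {
    CodeBuffer cb = new CodeBuffer();   // two-space indent by default
    cb.append("void f() {\n");
    cb.append("if (x) {\n");
    cb.append("g();\n");                // indented two levels automatically
    cb.append("}\n");
    cb.append("}\n");
    System.out.print(cb);
    // Expected output:
    // void f() {
    //   if (x) {
    //     g();
    //   }
    // }
  }
}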

View File: org/apache/hadoop/record/compiler/CodeGenerator.java

@@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
/**
* CodeGenerator is a Factory and a base class for Hadoop Record I/O translators.
* Different translators register creation methods with this factory.
*/
abstract class CodeGenerator {
private static HashMap<String, CodeGenerator> generators =
new HashMap<String, CodeGenerator>();
static {
register("c", new CGenerator());
register("c++", new CppGenerator());
register("java", new JavaGenerator());
}
static void register(String lang, CodeGenerator gen) {
generators.put(lang, gen);
}
static CodeGenerator get(String lang) {
return generators.get(lang);
}
abstract void genCode(String file,
ArrayList<JFile> inclFiles,
ArrayList<JRecord> records,
String destDir,
ArrayList<String> options) throws IOException;
}

View File: org/apache/hadoop/record/compiler/Consts.java

@@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* const definitions for Record I/O compiler
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Consts {
/** Cannot create a new instance */
private Consts() {
}
// prefix to use for variables in generated classes
public static final String RIO_PREFIX = "_rio_";
// other vars used in generated classes
public static final String RTI_VAR = RIO_PREFIX + "recTypeInfo";
public static final String RTI_FILTER = RIO_PREFIX + "rtiFilter";
public static final String RTI_FILTER_FIELDS = RIO_PREFIX + "rtiFilterFields";
public static final String RECORD_OUTPUT = RIO_PREFIX + "a";
public static final String RECORD_INPUT = RIO_PREFIX + "a";
public static final String TAG = RIO_PREFIX + "tag";
}
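
To illustrate why the prefix exists: generated (de)serializers mix user-declared field names with compiler-introduced parameters and locals, so every compiler-introduced name carries _rio_ and cannot collide with a DDL field. A hypothetical sketch of the shape of generated code, using the names defined above (not actual rcc output):

import java.io.IOException;
import org.apache.hadoop.record.RecordInput;

// Hypothetical shape of a class emitted by the record compiler: the
// _rio_-prefixed names come from the compiler (see Consts), everything
// else comes from the user's DDL.
public class GeneratedShape {
  private int count;   // user-declared field from the DDL

  public void deserialize(RecordInput _rio_a, String _rio_tag) throws IOException {
    _rio_a.startRecord(_rio_tag);      // RECORD_INPUT is named "_rio_a"
    count = _rio_a.readInt("count");
    _rio_a.endRecord(_rio_tag);        // TAG parameter is named "_rio_tag"
  }
}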

View File: org/apache/hadoop/record/compiler/CppGenerator.java

@@ -0,0 +1,75 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.ArrayList;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Iterator;
/**
* C++ Code generator front-end for Hadoop record I/O.
*/
class CppGenerator extends CodeGenerator {
CppGenerator() {
}
/**
* Generate C++ code. This method only creates the requested file(s)
* and spits out file-level elements (such as include statements, etc.);
* record-level code is generated by JRecord.
*/
@Override
void genCode(String name, ArrayList<JFile> ilist,
ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
throws IOException {
name = new File(destDir, (new File(name)).getName()).getAbsolutePath();
FileWriter cc = new FileWriter(name+".cc");
try {
FileWriter hh = new FileWriter(name+".hh");
try {
String fileName = (new File(name)).getName();
hh.write("#ifndef __"+fileName.toUpperCase().replace('.','_')+"__\n");
hh.write("#define __"+fileName.toUpperCase().replace('.','_')+"__\n");
hh.write("#include \"recordio.hh\"\n");
hh.write("#include \"recordTypeInfo.hh\"\n");
for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
hh.write("#include \""+iter.next().getName()+".hh\"\n");
}
cc.write("#include \""+fileName+".hh\"\n");
cc.write("#include \"utils.hh\"\n");
for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
iter.next().genCppCode(hh, cc, options);
}
hh.write("#endif //"+fileName.toUpperCase().replace('.','_')+"__\n");
} finally {
hh.close();
}
} finally {
cc.close();
}
}
}

View File: org/apache/hadoop/record/compiler/JBoolean.java

@@ -0,0 +1,106 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JBoolean extends JType {
class JavaBoolean extends JType.JavaType {
JavaBoolean() {
super("boolean", "Bool", "Boolean", "TypeID.RIOType.BOOL");
}
@Override
void genCompareTo(CodeBuffer cb, String fname, String other) {
cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 : ("+
fname+"?1:-1);\n");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.BoolTypeID";
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = ("+fname+")?0:1;\n");
}
// In Binary format, boolean is written as byte. true = 1, false = 0
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("if ("+l+"<1) {\n");
cb.append("throw new java.io.IOException(\"Boolean is exactly 1 byte."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append(s+"++; "+l+"--;\n");
cb.append("}\n");
}
// In Binary format, boolean is written as byte. true = 1, false = 0
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("if (l1<1 || l2<1) {\n");
cb.append("throw new java.io.IOException(\"Boolean is exactly 1 byte."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append("if (b1[s1] != b2[s2]) {\n");
cb.append("return (b1[s1]<b2[s2])? -1 : 0;\n");
cb.append("}\n");
cb.append("s1++; s2++; l1--; l2--;\n");
cb.append("}\n");
}
}
class CppBoolean extends CppType {
CppBoolean() {
super("bool");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BOOL)";
}
}
/** Creates a new instance of JBoolean */
public JBoolean() {
setJavaType(new JavaBoolean());
setCppType(new CppBoolean());
setCType(new CType());
}
@Override
String getSignature() {
return "z";
}
}

View File: org/apache/hadoop/record/compiler/JBuffer.java

@@ -0,0 +1,120 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Code generator for "buffer" type.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JBuffer extends JCompType {
class JavaBuffer extends JavaCompType {
JavaBuffer() {
super("org.apache.hadoop.record.Buffer", "Buffer",
"org.apache.hadoop.record.Buffer", "TypeID.RIOType.BUFFER");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.BufferTypeID";
}
@Override
void genCompareTo(CodeBuffer cb, String fname, String other) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
}
@Override
void genEquals(CodeBuffer cb, String fname, String peer) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+
b+", "+s+");\n");
cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
cb.append(s+" += z+i; "+l+" -= (z+i);\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);\n");
cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
cb.append("s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
cb.append("}\n");
}
}
class CppBuffer extends CppCompType {
CppBuffer() {
super(" ::std::string");
}
@Override
void genGetSet(CodeBuffer cb, String fname) {
cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
cb.append("virtual "+getType()+"& get"+toCamelCase(fname)+"() {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BUFFER)";
}
}
/** Creates a new instance of JBuffer */
public JBuffer() {
setJavaType(new JavaBuffer());
setCppType(new CppBuffer());
setCType(new CCompType());
}
@Override
String getSignature() {
return "B";
}
}

View File: org/apache/hadoop/record/compiler/JByte.java

@@ -0,0 +1,93 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Code generator for "byte" type.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JByte extends JType {
class JavaByte extends JavaType {
JavaByte() {
super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.ByteTypeID";
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("if ("+l+"<1) {\n");
cb.append("throw new java.io.IOException(\"Byte is exactly 1 byte."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append(s+"++; "+l+"--;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("if (l1<1 || l2<1) {\n");
cb.append("throw new java.io.IOException(\"Byte is exactly 1 byte."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append("if (b1[s1] != b2[s2]) {\n");
cb.append("return (b1[s1]<b2[s2])?-1:0;\n");
cb.append("}\n");
cb.append("s1++; s2++; l1--; l2--;\n");
cb.append("}\n");
}
}
class CppByte extends CppType {
CppByte() {
super("int8_t");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)";
}
}
public JByte() {
setJavaType(new JavaByte());
setCppType(new CppByte());
setCType(new CType());
}
@Override
String getSignature() {
return "b";
}
}

View File: org/apache/hadoop/record/compiler/JCompType.java

@@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
/**
* Abstract base class for all the "compound" types such as ustring,
* buffer, vector, map, and record.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
abstract class JCompType extends JType {
abstract class JavaCompType extends JavaType {
JavaCompType(String type, String suffix, String wrapper,
String typeIDByteString) {
super(type, suffix, wrapper, typeIDByteString);
}
@Override
void genCompareTo(CodeBuffer cb, String fname, String other) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
}
@Override
void genEquals(CodeBuffer cb, String fname, String peer) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
}
@Override
void genClone(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+
fname+".clone();\n");
}
}
abstract class CppCompType extends CppType {
CppCompType(String type) {
super(type);
}
@Override
void genGetSet(CodeBuffer cb, String fname) {
cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
cb.append("virtual "+getType()+"& get"+toCamelCase(fname)+"() {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
}
}
class CCompType extends CType {
}
}
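
The genClone hook above contributes the deep-copy line for each composite field of a generated record, while primitive fields are copied by value. A hypothetical sketch of the resulting clone() shape, assuming one Buffer field (not actual rcc output):

import org.apache.hadoop.record.Buffer;

// Hypothetical shape of a generated clone(): composite fields are
// deep-copied via the line genClone emits, primitives copy directly.
public class CloneShape implements Cloneable {
  private Buffer data = new Buffer();   // user-declared composite field
  private int count;                    // user-declared primitive field

  @Override
  public Object clone() throws CloneNotSupportedException {
    CloneShape other = new CloneShape();
    other.data = (Buffer) this.data.clone();   // line emitted by genClone
    other.count = this.count;                  // primitives copy by value
    return other;
  }
}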

View File: org/apache/hadoop/record/compiler/JDouble.java

@@ -0,0 +1,102 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JDouble extends JType {
class JavaDouble extends JavaType {
JavaDouble() {
super("double", "Double", "Double", "TypeID.RIOType.DOUBLE");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID";
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
String tmp = "Double.doubleToLongBits("+fname+")";
cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n");
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("if ("+l+"<8) {\n");
cb.append("throw new java.io.IOException(\"Double is exactly 8 bytes."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append(s+"+=8; "+l+"-=8;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("if (l1<8 || l2<8) {\n");
cb.append("throw new java.io.IOException(\"Double is exactly 8 bytes."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append("double d1 = org.apache.hadoop.record.Utils.readDouble(b1, s1);\n");
cb.append("double d2 = org.apache.hadoop.record.Utils.readDouble(b2, s2);\n");
cb.append("if (d1 != d2) {\n");
cb.append("return ((d1-d2) < 0) ? -1 : 0;\n");
cb.append("}\n");
cb.append("s1+=8; s2+=8; l1-=8; l2-=8;\n");
cb.append("}\n");
}
}
class CppDouble extends CppType {
CppDouble() {
super("double");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)";
}
}
/** Creates a new instance of JDouble */
public JDouble() {
setJavaType(new JavaDouble());
setCppType(new CppDouble());
setCType(new CType());
}
@Override
String getSignature() {
return "d";
}
}

View File: org/apache/hadoop/record/compiler/JField.java

@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A thin wrapper around a record field.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JField<T> {
private String name;
private T type;
/**
* Creates a new instance of JField
*/
public JField(String name, T type) {
this.type = type;
this.name = name;
}
String getName() {
return name;
}
T getType() {
return type;
}
}

View File: org/apache/hadoop/record/compiler/JFile.java

@@ -0,0 +1,78 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Container for the Hadoop Record DDL.
* The main components of the file are filename, list of included files,
* and records defined in that file.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JFile {
/** Possibly full name of the file */
private String mName;
/** Ordered list of included files */
private ArrayList<JFile> mInclFiles;
/** Ordered list of records declared in this file */
private ArrayList<JRecord> mRecords;
/** Creates a new instance of JFile
*
* @param name possibly full pathname to the file
* @param inclFiles included files (as JFile)
* @param recList List of records defined within this file
*/
public JFile(String name, ArrayList<JFile> inclFiles,
ArrayList<JRecord> recList) {
mName = name;
mInclFiles = inclFiles;
mRecords = recList;
}
/** Strip the other pathname components and return the basename */
String getName() {
int idx = mName.lastIndexOf('/');
return (idx > 0) ? mName.substring(idx) : mName;
}
/** Generate record code in given language. Language should be all
* lowercase.
*/
public int genCode(String language, String destDir, ArrayList<String> options)
throws IOException {
CodeGenerator gen = CodeGenerator.get(language);
if (gen != null) {
gen.genCode(mName, mInclFiles, mRecords, destDir, options);
} else {
System.err.println("Cannnot recognize language:"+language);
return 1;
}
return 0;
}
}
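
A minimal driver sketch for genCode. Normally the rcc DDL parser assembles the JFile; an empty one (the file name demo.jr here is hypothetical) still exercises the lowercase-language dispatch through CodeGenerator.get.

import java.util.ArrayList;
import org.apache.hadoop.record.compiler.JFile;
import org.apache.hadoop.record.compiler.JRecord;

public class GenDriver {
  public static void main(String[] args) throws Exception {
    // Normally the rcc DDL parser builds the JFile; an empty one still
    // exercises the language dispatch in genCode.
    JFile file = new JFile("demo.jr", new ArrayList<JFile>(),
                           new ArrayList<JRecord>());
    // "java" is a registered generator; an unknown language prints an
    // error and returns a non-zero status.
    int rc = file.genCode("java", ".", new ArrayList<String>());
    System.out.println("exit status: " + rc);
  }
}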

View File: org/apache/hadoop/record/compiler/JFloat.java

@@ -0,0 +1,99 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JFloat extends JType {
class JavaFloat extends JavaType {
JavaFloat() {
super("float", "Float", "Float", "TypeID.RIOType.FLOAT");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.FloatTypeID";
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n");
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("if ("+l+"<4) {\n");
cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append(s+"+=4; "+l+"-=4;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("if (l1<4 || l2<4) {\n");
cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+
" Provided buffer is smaller.\");\n");
cb.append("}\n");
cb.append("float f1 = org.apache.hadoop.record.Utils.readFloat(b1, s1);\n");
cb.append("float f2 = org.apache.hadoop.record.Utils.readFloat(b2, s2);\n");
cb.append("if (f1 != f2) {\n");
cb.append("return ((f1-f2) < 0) ? -1 : 0;\n");
cb.append("}\n");
cb.append("s1+=4; s2+=4; l1-=4; l2-=4;\n");
cb.append("}\n");
}
}
class CppFloat extends CppType {
CppFloat() {
super("float");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)";
}
}
/** Creates a new instance of JFloat */
public JFloat() {
setJavaType(new JavaFloat());
setCppType(new CppFloat());
setCType(new CType());
}
@Override
String getSignature() {
return "f";
}
}

View File: org/apache/hadoop/record/compiler/JInt.java

@@ -0,0 +1,93 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Code generator for "int" type
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JInt extends JType {
class JavaInt extends JavaType {
JavaInt() {
super("int", "Int", "Integer", "TypeID.RIOType.INT");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.IntTypeID";
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
cb.append(s+"+=z; "+l+"-=z;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
cb.append("if (i1 != i2) {\n");
cb.append("return ((i1-i2) < 0) ? -1 : 0;\n");
cb.append("}\n");
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
cb.append("}\n");
}
}
class CppInt extends CppType {
CppInt() {
super("int32_t");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_INT)";
}
}
/** Creates a new instance of JInt */
public JInt() {
setJavaType(new JavaInt());
setCppType(new CppInt());
setCType(new CType());
}
@Override
String getSignature() {
return "i";
}
}
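
For orientation, the fragments emitted by genSlurpBytes/genCompareBytes are spliced into a generated raw comparator that walks two serialized buffers field by field without deserializing whole records. A hypothetical single-int-field expansion of the genCompareBytes block above, wrapped in a method so it compiles; note the emitted "? -1 : 0", reproduced verbatim, returns 0 rather than 1 for the greater-than case.

import java.io.IOException;
import org.apache.hadoop.record.Utils;

public class IntFieldCompare {
  // Hypothetical expansion of the block JavaInt.genCompareBytes appends.
  static int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2)
      throws IOException {
    {
      int i1 = Utils.readVInt(b1, s1);
      int i2 = Utils.readVInt(b2, s2);
      if (i1 != i2) {
        return ((i1 - i2) < 0) ? -1 : 0;   // verbatim from the generator above
      }
      int z1 = Utils.getVIntSize(i1);
      int z2 = Utils.getVIntSize(i2);
      s1 += z1; s2 += z2; l1 -= z1; l2 -= z2;   // advance past the field
    }
    return 0;   // simplified tail for this sketch: the field compared equal
  }
}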

View File: org/apache/hadoop/record/compiler/JLong.java

@@ -0,0 +1,98 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Code generator for "long" type
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JLong extends JType {
class JavaLong extends JavaType {
JavaLong() {
super("long", "Long", "Long", "TypeID.RIOType.LONG");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.LongTypeID";
}
@Override
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+
fname+">>>32));\n");
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("long i = org.apache.hadoop.record.Utils.readVLong("+b+", "+s+");\n");
cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
cb.append(s+"+=z; "+l+"-=z;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);\n");
cb.append("long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);\n");
cb.append("if (i1 != i2) {\n");
cb.append("return ((i1-i2) < 0) ? -1 : 0;\n");
cb.append("}\n");
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
cb.append("}\n");
}
}
class CppLong extends CppType {
CppLong() {
super("int64_t");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)";
}
}
/** Creates a new instance of JLong */
public JLong() {
setJavaType(new JavaLong());
setCppType(new CppLong());
setCType(new CType());
}
@Override
String getSignature() {
return "l";
}
}

View File: org/apache/hadoop/record/compiler/JMap.java

@@ -0,0 +1,246 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JMap extends JCompType {
static private int level = 0;
static private String getLevel() { return Integer.toString(level); }
static private void incrLevel() { level++; }
static private void decrLevel() { level--; }
static private String getId(String id) { return id+getLevel(); }
private JType keyType;
private JType valueType;
class JavaMap extends JavaCompType {
JType.JavaType key;
JType.JavaType value;
JavaMap(JType.JavaType key, JType.JavaType value) {
super("java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
"Map",
"java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
"TypeID.RIOType.MAP");
this.key = key;
this.value = value;
}
@Override
String getTypeIDObjectString() {
return "new org.apache.hadoop.record.meta.MapTypeID(" +
key.getTypeIDObjectString() + ", " +
value.getTypeIDObjectString() + ")";
}
@Override
void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
key.genSetRTIFilter(cb, nestedStructMap);
value.genSetRTIFilter(cb, nestedStructMap);
}
@Override
void genCompareTo(CodeBuffer cb, String fname, String other) {
String setType = "java.util.Set<"+key.getWrapperType()+"> ";
String iterType = "java.util.Iterator<"+key.getWrapperType()+"> ";
cb.append("{\n");
cb.append(setType+getId(Consts.RIO_PREFIX + "set1")+" = "+
fname+".keySet();\n");
cb.append(setType+getId(Consts.RIO_PREFIX + "set2")+" = "+
other+".keySet();\n");
cb.append(iterType+getId(Consts.RIO_PREFIX + "miter1")+" = "+
getId(Consts.RIO_PREFIX + "set1")+".iterator();\n");
cb.append(iterType+getId(Consts.RIO_PREFIX + "miter2")+" = "+
getId(Consts.RIO_PREFIX + "set2")+".iterator();\n");
cb.append("for(; "+getId(Consts.RIO_PREFIX + "miter1")+".hasNext() && "+
getId(Consts.RIO_PREFIX + "miter2")+".hasNext();) {\n");
cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k1")+
" = "+getId(Consts.RIO_PREFIX + "miter1")+".next();\n");
cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k2")+
" = "+getId(Consts.RIO_PREFIX + "miter2")+".next();\n");
key.genCompareTo(cb, getId(Consts.RIO_PREFIX + "k1"),
getId(Consts.RIO_PREFIX + "k2"));
cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
Consts.RIO_PREFIX + "ret; }\n");
cb.append("}\n");
cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "set1")+
".size() - "+getId(Consts.RIO_PREFIX + "set2")+".size());\n");
cb.append("}\n");
}
@Override
void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
if (decl) {
cb.append(getType()+" "+fname+";\n");
}
cb.append("{\n");
incrLevel();
cb.append("org.apache.hadoop.record.Index " +
getId(Consts.RIO_PREFIX + "midx")+" = " +
Consts.RECORD_INPUT + ".startMap(\""+tag+"\");\n");
cb.append(fname+"=new "+getType()+"();\n");
cb.append("for (; !"+getId(Consts.RIO_PREFIX + "midx")+".done(); "+
getId(Consts.RIO_PREFIX + "midx")+".incr()) {\n");
key.genReadMethod(cb, getId(Consts.RIO_PREFIX + "k"),
getId(Consts.RIO_PREFIX + "k"), true);
value.genReadMethod(cb, getId(Consts.RIO_PREFIX + "v"),
getId(Consts.RIO_PREFIX + "v"), true);
cb.append(fname+".put("+getId(Consts.RIO_PREFIX + "k")+","+
getId(Consts.RIO_PREFIX + "v")+");\n");
cb.append("}\n");
cb.append(Consts.RECORD_INPUT + ".endMap(\""+tag+"\");\n");
decrLevel();
cb.append("}\n");
}
@Override
void genWriteMethod(CodeBuffer cb, String fname, String tag) {
String setType = "java.util.Set<java.util.Map.Entry<"+
key.getWrapperType()+","+value.getWrapperType()+">> ";
String entryType = "java.util.Map.Entry<"+
key.getWrapperType()+","+value.getWrapperType()+"> ";
String iterType = "java.util.Iterator<java.util.Map.Entry<"+
key.getWrapperType()+","+value.getWrapperType()+">> ";
cb.append("{\n");
incrLevel();
cb.append(Consts.RECORD_OUTPUT + ".startMap("+fname+",\""+tag+"\");\n");
cb.append(setType+getId(Consts.RIO_PREFIX + "es")+" = "+
fname+".entrySet();\n");
cb.append("for("+iterType+getId(Consts.RIO_PREFIX + "midx")+" = "+
getId(Consts.RIO_PREFIX + "es")+".iterator(); "+
getId(Consts.RIO_PREFIX + "midx")+".hasNext();) {\n");
cb.append(entryType+getId(Consts.RIO_PREFIX + "me")+" = "+
getId(Consts.RIO_PREFIX + "midx")+".next();\n");
cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k")+" = "+
getId(Consts.RIO_PREFIX + "me")+".getKey();\n");
cb.append(value.getType()+" "+getId(Consts.RIO_PREFIX + "v")+" = "+
getId(Consts.RIO_PREFIX + "me")+".getValue();\n");
key.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "k"),
getId(Consts.RIO_PREFIX + "k"));
value.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "v"),
getId(Consts.RIO_PREFIX + "v"));
cb.append("}\n");
cb.append(Consts.RECORD_OUTPUT + ".endMap("+fname+",\""+tag+"\");\n");
cb.append("}\n");
decrLevel();
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
incrLevel();
cb.append("int "+getId("mi")+
" = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
cb.append("int "+getId("mz")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi")+");\n");
cb.append(s+"+="+getId("mz")+"; "+l+"-="+getId("mz")+";\n");
cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+
" < "+getId("mi")+"; "+getId("midx")+"++) {");
key.genSlurpBytes(cb, b, s, l);
value.genSlurpBytes(cb, b, s, l);
cb.append("}\n");
decrLevel();
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
incrLevel();
cb.append("int "+getId("mi1")+
" = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
cb.append("int "+getId("mi2")+
" = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
cb.append("int "+getId("mz1")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi1")+");\n");
cb.append("int "+getId("mz2")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi2")+");\n");
cb.append("s1+="+getId("mz1")+"; s2+="+getId("mz2")+
"; l1-="+getId("mz1")+"; l2-="+getId("mz2")+";\n");
cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+
" < "+getId("mi1")+" && "+getId("midx")+" < "+getId("mi2")+
"; "+getId("midx")+"++) {");
key.genCompareBytes(cb);
value.genSlurpBytes(cb, "b1", "s1", "l1");
value.genSlurpBytes(cb, "b2", "s2", "l2");
cb.append("}\n");
cb.append("if ("+getId("mi1")+" != "+getId("mi2")+
") { return ("+getId("mi1")+"<"+getId("mi2")+")?-1:0; }\n");
decrLevel();
cb.append("}\n");
}
}
class CppMap extends CppCompType {
JType.CppType key;
JType.CppType value;
CppMap(JType.CppType key, JType.CppType value) {
super("::std::map< "+key.getType()+", "+ value.getType()+" >");
this.key = key;
this.value = value;
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::MapTypeID(" +
key.getTypeIDObjectString() + ", " +
value.getTypeIDObjectString() + ")";
}
@Override
void genSetRTIFilter(CodeBuffer cb) {
key.genSetRTIFilter(cb);
value.genSetRTIFilter(cb);
}
}
/** Creates a new instance of JMap */
public JMap(JType t1, JType t2) {
setJavaType(new JavaMap(t1.getJavaType(), t2.getJavaType()));
setCppType(new CppMap(t1.getCppType(), t2.getCppType()));
setCType(new CType());
keyType = t1;
valueType = t2;
}
@Override
String getSignature() {
return "{" + keyType.getSignature() + valueType.getSignature() +"}";
}
}
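To make the signature scheme concrete: a minimal sketch, assuming it is compiled
into the org.apache.hadoop.record.compiler package (getSignature() is
package-private) and using a hypothetical class name.

package org.apache.hadoop.record.compiler;

// Sketch only: a map keyed by strings with long values composes the
// element signatures inside braces, per getSignature() above.
public class JMapSignatureSketch {
  public static void main(String[] args) {
    JMap m = new JMap(new JString(), new JLong());
    System.out.println(m.getSignature());  // prints "{sl}"
  }
}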


@@ -0,0 +1,822 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JRecord extends JCompType {
class JavaRecord extends JavaCompType {
private String fullName;
private String name;
private String module;
private ArrayList<JField<JavaType>> fields =
new ArrayList<JField<JavaType>>();
JavaRecord(String name, ArrayList<JField<JType>> flist) {
super(name, "Record", name, "TypeID.RIOType.STRUCT");
this.fullName = name;
int idx = name.lastIndexOf('.');
this.name = name.substring(idx+1);
this.module = name.substring(0, idx);
for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
JField<JType> f = iter.next();
fields.add(new JField<JavaType>(f.getName(), f.getType().getJavaType()));
}
}
@Override
String getTypeIDObjectString() {
return "new org.apache.hadoop.record.meta.StructTypeID(" +
fullName + ".getTypeInfo())";
}
@Override
void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
// ignore if we've already set the type filter for this record
if (!nestedStructMap.containsKey(fullName)) {
// we set the RTI filter here
cb.append(fullName + ".setTypeFilter(rti.getNestedStructTypeInfo(\""+
name + "\"));\n");
nestedStructMap.put(fullName, null);
}
}
// For each typeInfo in the filter, we see if there's a similar one in the record.
// Since we store typeInfos in ArrayLists, this search is O(n squared). We could do it
// faster if we also stored a map (of TypeInfo to index), but since setupRtiFields() is
// called only once when deserializing, we're sticking with the former, as the code is simpler.
void genSetupRtiFields(CodeBuffer cb) {
cb.append("private static void setupRtiFields()\n{\n");
cb.append("if (null == " + Consts.RTI_FILTER + ") return;\n");
cb.append("// we may already have done this\n");
cb.append("if (null != " + Consts.RTI_FILTER_FIELDS + ") return;\n");
cb.append("int " + Consts.RIO_PREFIX + "i, " + Consts.RIO_PREFIX + "j;\n");
cb.append(Consts.RTI_FILTER_FIELDS + " = new int [" +
Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().size()];\n");
cb.append("for (" + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + "i<"+
Consts.RTI_FILTER_FIELDS + ".length; " + Consts.RIO_PREFIX + "i++) {\n");
cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = 0;\n");
cb.append("}\n");
cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
"FieldTypeInfo> " + Consts.RIO_PREFIX + "itFilter = " +
Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().iterator();\n");
cb.append(Consts.RIO_PREFIX + "i=0;\n");
cb.append("while (" + Consts.RIO_PREFIX + "itFilter.hasNext()) {\n");
cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " +
Consts.RIO_PREFIX + "tInfoFilter = " +
Consts.RIO_PREFIX + "itFilter.next();\n");
cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
"FieldTypeInfo> " + Consts.RIO_PREFIX + "it = " + Consts.RTI_VAR +
".getFieldTypeInfos().iterator();\n");
cb.append(Consts.RIO_PREFIX + "j=1;\n");
cb.append("while (" + Consts.RIO_PREFIX + "it.hasNext()) {\n");
cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " +
Consts.RIO_PREFIX + "tInfo = " + Consts.RIO_PREFIX + "it.next();\n");
cb.append("if (" + Consts.RIO_PREFIX + "tInfo.equals(" +
Consts.RIO_PREFIX + "tInfoFilter)) {\n");
cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = " +
Consts.RIO_PREFIX + "j;\n");
cb.append("break;\n");
cb.append("}\n");
cb.append(Consts.RIO_PREFIX + "j++;\n");
cb.append("}\n");
/*int ct = 0;
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
ct++;
JField<JavaType> jf = i.next();
JavaType type = jf.getType();
String name = jf.getName();
if (ct != 1) {
cb.append("else ");
}
type.genRtiFieldCondition(cb, name, ct);
}
if (ct != 0) {
cb.append("else {\n");
cb.append("rtiFilterFields[i] = 0;\n");
cb.append("}\n");
}*/
cb.append(Consts.RIO_PREFIX + "i++;\n");
cb.append("}\n");
cb.append("}\n");
}
@Override
void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
if (decl) {
cb.append(fullName+" "+fname+";\n");
}
cb.append(fname+"= new "+fullName+"();\n");
cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n");
}
@Override
void genWriteMethod(CodeBuffer cb, String fname, String tag) {
cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n");
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("int r = "+fullName+
".Comparator.slurpRaw("+b+","+s+","+l+");\n");
cb.append(s+"+=r; "+l+"-=r;\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("int r1 = "+fullName+
".Comparator.compareRaw(b1,s1,l1,b2,s2,l2);\n");
cb.append("if (r1 <= 0) { return r1; }\n");
cb.append("s1+=r1; s2+=r1; l1-=r1; l2-=r1;\n");
cb.append("}\n");
}
void genCode(String destDir, ArrayList<String> options) throws IOException {
String pkg = module;
String pkgpath = pkg.replaceAll("\\.", "/");
File pkgdir = new File(destDir, pkgpath);
final File jfile = new File(pkgdir, name+".java");
if (!pkgdir.exists()) {
// create the pkg directory
boolean ret = pkgdir.mkdirs();
if (!ret) {
throw new IOException("Cannnot create directory: "+pkgpath);
}
} else if (!pkgdir.isDirectory()) {
// not a directory
throw new IOException(pkgpath+" is not a directory.");
}
CodeBuffer cb = new CodeBuffer();
cb.append("// File generated by hadoop record compiler. Do not edit.\n");
cb.append("package "+module+";\n\n");
cb.append("public class "+name+
" extends org.apache.hadoop.record.Record {\n");
// type information declarations
cb.append("private static final " +
"org.apache.hadoop.record.meta.RecordTypeInfo " +
Consts.RTI_VAR + ";\n");
cb.append("private static " +
"org.apache.hadoop.record.meta.RecordTypeInfo " +
Consts.RTI_FILTER + ";\n");
cb.append("private static int[] " + Consts.RTI_FILTER_FIELDS + ";\n");
// static init for type information
cb.append("static {\n");
cb.append(Consts.RTI_VAR + " = " +
"new org.apache.hadoop.record.meta.RecordTypeInfo(\"" +
name + "\");\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genStaticTypeInfo(cb, name);
}
cb.append("}\n\n");
// field definitions
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genDecl(cb, name);
}
// default constructor
cb.append("public "+name+"() { }\n");
// constructor
cb.append("public "+name+"(\n");
int fIdx = 0;
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genConstructorParam(cb, name);
cb.append((!i.hasNext())?"":",\n");
}
cb.append(") {\n");
fIdx = 0;
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genConstructorSet(cb, name);
}
cb.append("}\n");
// getter/setter for type info
cb.append("public static org.apache.hadoop.record.meta.RecordTypeInfo"
+ " getTypeInfo() {\n");
cb.append("return " + Consts.RTI_VAR + ";\n");
cb.append("}\n");
cb.append("public static void setTypeFilter("
+ "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n");
cb.append("if (null == rti) return;\n");
cb.append(Consts.RTI_FILTER + " = rti;\n");
cb.append(Consts.RTI_FILTER_FIELDS + " = null;\n");
// set RTIFilter for nested structs.
// To prevent setting up the type filter for the same struct more than once,
// we use a hash map to keep track of what we've set.
Map<String, Integer> nestedStructMap = new HashMap<String, Integer>();
for (JField<JavaType> jf : fields) {
JavaType type = jf.getType();
type.genSetRTIFilter(cb, nestedStructMap);
}
cb.append("}\n");
// setupRtiFields()
genSetupRtiFields(cb);
// getters/setters for member variables
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genGetSet(cb, name);
}
// serialize()
cb.append("public void serialize("+
"final org.apache.hadoop.record.RecordOutput " +
Consts.RECORD_OUTPUT + ", final String " + Consts.TAG + ")\n"+
"throws java.io.IOException {\n");
cb.append(Consts.RECORD_OUTPUT + ".startRecord(this," + Consts.TAG + ");\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genWriteMethod(cb, name, name);
}
cb.append(Consts.RECORD_OUTPUT + ".endRecord(this," + Consts.TAG+");\n");
cb.append("}\n");
// deserializeWithoutFilter()
cb.append("private void deserializeWithoutFilter("+
"final org.apache.hadoop.record.RecordInput " +
Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
"throws java.io.IOException {\n");
cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genReadMethod(cb, name, name, false);
}
cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
cb.append("}\n");
// deserialize()
cb.append("public void deserialize(final " +
"org.apache.hadoop.record.RecordInput " +
Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
"throws java.io.IOException {\n");
cb.append("if (null == " + Consts.RTI_FILTER + ") {\n");
cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " +
Consts.TAG + ");\n");
cb.append("return;\n");
cb.append("}\n");
cb.append("// if we're here, we need to read based on version info\n");
cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
cb.append("setupRtiFields();\n");
cb.append("for (int " + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX +
"i<" + Consts.RTI_FILTER + ".getFieldTypeInfos().size(); " +
Consts.RIO_PREFIX + "i++) {\n");
int ct = 0;
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
ct++;
if (1 != ct) {
cb.append("else ");
}
cb.append("if (" + ct + " == " + Consts.RTI_FILTER_FIELDS + "[" +
Consts.RIO_PREFIX + "i]) {\n");
type.genReadMethod(cb, name, name, false);
cb.append("}\n");
}
if (0 != ct) {
cb.append("else {\n");
cb.append("java.util.ArrayList<"
+ "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
+ "(java.util.ArrayList<"
+ "org.apache.hadoop.record.meta.FieldTypeInfo>)"
+ "(" + Consts.RTI_FILTER + ".getFieldTypeInfos());\n");
cb.append("org.apache.hadoop.record.meta.Utils.skip(" +
Consts.RECORD_INPUT + ", " + "typeInfos.get(" + Consts.RIO_PREFIX +
"i).getFieldID(), typeInfos.get(" +
Consts.RIO_PREFIX + "i).getTypeID());\n");
cb.append("}\n");
}
cb.append("}\n");
cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
cb.append("}\n");
// compareTo()
cb.append("public int compareTo (final Object " + Consts.RIO_PREFIX +
"peer_) throws ClassCastException {\n");
cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
cb.append("}\n");
cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " +
Consts.RIO_PREFIX + "peer_;\n");
cb.append("int " + Consts.RIO_PREFIX + "ret = 0;\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genCompareTo(cb, name, Consts.RIO_PREFIX + "peer."+name);
cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) return " +
Consts.RIO_PREFIX + "ret;\n");
}
cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
cb.append("}\n");
// equals()
cb.append("public boolean equals(final Object " + Consts.RIO_PREFIX +
"peer_) {\n");
cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
cb.append("return false;\n");
cb.append("}\n");
cb.append("if (" + Consts.RIO_PREFIX + "peer_ == this) {\n");
cb.append("return true;\n");
cb.append("}\n");
cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " +
Consts.RIO_PREFIX + "peer_;\n");
cb.append("boolean " + Consts.RIO_PREFIX + "ret = false;\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genEquals(cb, name, Consts.RIO_PREFIX + "peer."+name);
cb.append("if (!" + Consts.RIO_PREFIX + "ret) return " +
Consts.RIO_PREFIX + "ret;\n");
}
cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
cb.append("}\n");
// clone()
cb.append("public Object clone() throws CloneNotSupportedException {\n");
cb.append(name+" " + Consts.RIO_PREFIX + "other = new "+name+"();\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genClone(cb, name);
}
cb.append("return " + Consts.RIO_PREFIX + "other;\n");
cb.append("}\n");
cb.append("public int hashCode() {\n");
cb.append("int " + Consts.RIO_PREFIX + "result = 17;\n");
cb.append("int " + Consts.RIO_PREFIX + "ret;\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genHashCode(cb, name);
cb.append(Consts.RIO_PREFIX + "result = 37*" + Consts.RIO_PREFIX +
"result + " + Consts.RIO_PREFIX + "ret;\n");
}
cb.append("return " + Consts.RIO_PREFIX + "result;\n");
cb.append("}\n");
cb.append("public static String signature() {\n");
cb.append("return \""+getSignature()+"\";\n");
cb.append("}\n");
cb.append("public static class Comparator extends"+
" org.apache.hadoop.record.RecordComparator {\n");
cb.append("public Comparator() {\n");
cb.append("super("+name+".class);\n");
cb.append("}\n");
cb.append("static public int slurpRaw(byte[] b, int s, int l) {\n");
cb.append("try {\n");
cb.append("int os = s;\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genSlurpBytes(cb, "b","s","l");
}
cb.append("return (os - s);\n");
cb.append("} catch(java.io.IOException e) {\n");
cb.append("throw new RuntimeException(e);\n");
cb.append("}\n");
cb.append("}\n");
cb.append("static public int compareRaw(byte[] b1, int s1, int l1,\n");
cb.append(" byte[] b2, int s2, int l2) {\n");
cb.append("try {\n");
cb.append("int os1 = s1;\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
String name = jf.getName();
JavaType type = jf.getType();
type.genCompareBytes(cb);
}
cb.append("return (os1 - s1);\n");
cb.append("} catch(java.io.IOException e) {\n");
cb.append("throw new RuntimeException(e);\n");
cb.append("}\n");
cb.append("}\n");
cb.append("public int compare(byte[] b1, int s1, int l1,\n");
cb.append(" byte[] b2, int s2, int l2) {\n");
cb.append("int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
cb.append("return (ret == -1)? -1 : ((ret==0)? 1 : 0);");
cb.append("}\n");
cb.append("}\n\n");
cb.append("static {\n");
cb.append("org.apache.hadoop.record.RecordComparator.define("
+name+".class, new Comparator());\n");
cb.append("}\n");
cb.append("}\n");
FileWriter jj = new FileWriter(jfile);
try {
jj.write(cb.toString());
} finally {
jj.close();
}
}
}
class CppRecord extends CppCompType {
private String fullName;
private String name;
private String module;
private ArrayList<JField<CppType>> fields =
new ArrayList<JField<CppType>>();
CppRecord(String name, ArrayList<JField<JType>> flist) {
super(name.replaceAll("\\.","::"));
this.fullName = name.replaceAll("\\.", "::");
int idx = name.lastIndexOf('.');
this.name = name.substring(idx+1);
this.module = name.substring(0, idx).replaceAll("\\.", "::");
for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
JField<JType> f = iter.next();
fields.add(new JField<CppType>(f.getName(), f.getType().getCppType()));
}
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::StructTypeID(" +
fullName + "::getTypeInfo().getFieldTypeInfos())";
}
String genDecl(String fname) {
return " "+name+" "+fname+";\n";
}
@Override
void genSetRTIFilter(CodeBuffer cb) {
// we set the RTI filter here
cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+
name + "\"));\n");
}
void genSetupRTIFields(CodeBuffer cb) {
cb.append("void " + fullName + "::setupRtiFields() {\n");
cb.append("if (NULL == p" + Consts.RTI_FILTER + ") return;\n");
cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") return;\n");
cb.append("p" + Consts.RTI_FILTER_FIELDS + " = new int[p" +
Consts.RTI_FILTER + "->getFieldTypeInfos().size()];\n");
cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
"->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
"i] = 0;\n");
cb.append("}\n");
cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
"->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
cb.append("for (unsigned int " + Consts.RIO_PREFIX + "j=0; " +
Consts.RIO_PREFIX + "j<p" + Consts.RTI_VAR +
"->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "j++) {\n");
cb.append("if (*(p" + Consts.RTI_FILTER + "->getFieldTypeInfos()[" +
Consts.RIO_PREFIX + "i]) == *(p" + Consts.RTI_VAR +
"->getFieldTypeInfos()[" + Consts.RIO_PREFIX + "j])) {\n");
cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
"i] = " + Consts.RIO_PREFIX + "j+1;\n");
cb.append("break;\n");
cb.append("}\n");
cb.append("}\n");
cb.append("}\n");
cb.append("}\n");
}
void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
throws IOException {
CodeBuffer hb = new CodeBuffer();
String[] ns = module.split("::");
for (int i = 0; i < ns.length; i++) {
hb.append("namespace "+ns[i]+" {\n");
}
hb.append("class "+name+" : public ::hadoop::Record {\n");
hb.append("private:\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
type.genDecl(hb, name);
}
// type info vars
hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_VAR + ";\n");
hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_FILTER + ";\n");
hb.append("static int* p" + Consts.RTI_FILTER_FIELDS + ";\n");
hb.append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
hb.append("static void setupRtiFields();\n");
hb.append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " +
Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
hb.append("public:\n");
hb.append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " +
"{return *p" + Consts.RTI_VAR + ";}\n");
hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
hb.append("virtual void serialize(::hadoop::OArchive& " +
Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const;\n");
hb.append("virtual void deserialize(::hadoop::IArchive& " +
Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
hb.append("virtual const ::std::string& type() const;\n");
hb.append("virtual const ::std::string& signature() const;\n");
hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
hb.append("virtual bool operator==(const "+name+"& peer_) const;\n");
hb.append("virtual ~"+name+"() {};\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
type.genGetSet(hb, name);
}
hb.append("}; // end record "+name+"\n");
for (int i=ns.length-1; i>=0; i--) {
hb.append("} // end namespace "+ns[i]+"\n");
}
hh.write(hb.toString());
CodeBuffer cb = new CodeBuffer();
// initialize type info vars
cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
Consts.RTI_VAR + " = " + fullName + "::setupTypeInfo();\n");
cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
Consts.RTI_FILTER + " = NULL;\n");
cb.append("int* " + fullName + "::p" +
Consts.RTI_FILTER_FIELDS + " = NULL;\n\n");
// setupTypeInfo()
cb.append("::hadoop::RecordTypeInfo* "+fullName+"::setupTypeInfo() {\n");
cb.append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" +
name + "\");\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
type.genStaticTypeInfo(cb, name);
}
cb.append("return p;\n");
cb.append("}\n");
// setTypeFilter()
cb.append("void "+fullName+"::setTypeFilter(const " +
"::hadoop::RecordTypeInfo& rti) {\n");
cb.append("if (NULL != p" + Consts.RTI_FILTER + ") {\n");
cb.append("delete p" + Consts.RTI_FILTER + ";\n");
cb.append("}\n");
cb.append("p" + Consts.RTI_FILTER + " = new ::hadoop::RecordTypeInfo(rti);\n");
cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") {\n");
cb.append("delete p" + Consts.RTI_FILTER_FIELDS + ";\n");
cb.append("}\n");
cb.append("p" + Consts.RTI_FILTER_FIELDS + " = NULL;\n");
// set RTIFilter for nested structs. We may end up with multiple lines that
// do the same thing, if the same struct is nested in more than one field,
// but that's OK.
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
CppType type = jf.getType();
type.genSetRTIFilter(cb);
}
cb.append("}\n");
// setTypeFilter()
cb.append("void "+fullName+"::setTypeFilter(const " +
"::hadoop::RecordTypeInfo* prti) {\n");
cb.append("if (NULL != prti) {\n");
cb.append("setTypeFilter(*prti);\n");
cb.append("}\n");
cb.append("}\n");
// setupRtiFields()
genSetupRTIFields(cb);
// serialize()
cb.append("void "+fullName+"::serialize(::hadoop::OArchive& " +
Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const {\n");
cb.append(Consts.RECORD_OUTPUT + ".startRecord(*this," +
Consts.TAG + ");\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
if (type instanceof JBuffer.CppBuffer) {
cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+","+name+
".length(),\""+name+"\");\n");
} else {
cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+",\""+
name+"\");\n");
}
}
cb.append(Consts.RECORD_OUTPUT + ".endRecord(*this," + Consts.TAG + ");\n");
cb.append("return;\n");
cb.append("}\n");
// deserializeWithoutFilter()
cb.append("void "+fullName+"::deserializeWithoutFilter(::hadoop::IArchive& " +
Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
Consts.TAG + ");\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
if (type instanceof JBuffer.CppBuffer) {
cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
name+",len,\""+name+"\");\n}\n");
} else {
cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
name+"\");\n");
}
}
cb.append(Consts.RECORD_INPUT + ".endRecord(*this," + Consts.TAG + ");\n");
cb.append("return;\n");
cb.append("}\n");
// deserialize()
cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& " +
Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
cb.append("if (NULL == p" + Consts.RTI_FILTER + ") {\n");
cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " +
Consts.TAG + ");\n");
cb.append("return;\n");
cb.append("}\n");
cb.append("// if we're here, we need to read based on version info\n");
cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
Consts.TAG + ");\n");
cb.append("setupRtiFields();\n");
cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
"->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
int ct = 0;
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
CppType type = jf.getType();
ct++;
if (1 != ct) {
cb.append("else ");
}
cb.append("if (" + ct + " == p" + Consts.RTI_FILTER_FIELDS + "[" +
Consts.RIO_PREFIX + "i]) {\n");
if (type instanceof JBuffer.CppBuffer) {
cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
name+",len,\""+name+"\");\n}\n");
} else {
cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
name+"\");\n");
}
cb.append("}\n");
}
if (0 != ct) {
cb.append("else {\n");
cb.append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" +
Consts.RTI_FILTER + "->getFieldTypeInfos();\n");
cb.append("::hadoop::Utils::skip(" + Consts.RECORD_INPUT +
", typeInfos[" + Consts.RIO_PREFIX + "i]->getFieldID()->c_str()" +
", *(typeInfos[" + Consts.RIO_PREFIX + "i]->getTypeID()));\n");
cb.append("}\n");
}
cb.append("}\n");
cb.append(Consts.RECORD_INPUT + ".endRecord(*this, " + Consts.TAG+");\n");
cb.append("}\n");
// operator <
cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
cb.append("return (1\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
cb.append("&& ("+name+" < peer_."+name+")\n");
}
cb.append(");\n");
cb.append("}\n");
cb.append("bool "+fullName+"::operator== (const "+fullName+"& peer_) const {\n");
cb.append("return (1\n");
for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
JField<CppType> jf = i.next();
String name = jf.getName();
cb.append("&& ("+name+" == peer_."+name+")\n");
}
cb.append(");\n");
cb.append("}\n");
cb.append("const ::std::string&"+fullName+"::type() const {\n");
cb.append("static const ::std::string type_(\""+name+"\");\n");
cb.append("return type_;\n");
cb.append("}\n");
cb.append("const ::std::string&"+fullName+"::signature() const {\n");
cb.append("static const ::std::string sig_(\""+getSignature()+"\");\n");
cb.append("return sig_;\n");
cb.append("}\n");
cc.write(cb.toString());
}
}
class CRecord extends CCompType {
}
private String signature;
/**
* Creates a new instance of JRecord
*/
public JRecord(String name, ArrayList<JField<JType>> flist) {
setJavaType(new JavaRecord(name, flist));
setCppType(new CppRecord(name, flist));
setCType(new CRecord());
// precompute signature
int idx = name.lastIndexOf('.');
String recName = name.substring(idx+1);
StringBuilder sb = new StringBuilder();
sb.append("L").append(recName).append("(");
for (Iterator<JField<JType>> i = flist.iterator(); i.hasNext();) {
String s = i.next().getType().getSignature();
sb.append(s);
}
sb.append(")");
signature = sb.toString();
}
@Override
String getSignature() {
return signature;
}
void genCppCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
throws IOException {
((CppRecord)getCppType()).genCode(hh, cc, options);
}
void genJavaCode(String destDir, ArrayList<String> options)
throws IOException {
((JavaRecord)getJavaType()).genCode(destDir, options);
}
}
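A minimal sketch of the signature the constructor above precomputes, for a
hypothetical two-field record (a string "name" and a long "age"); as before it
assumes the sketch sits in the compiler package so the package-private
getSignature() is visible.

package org.apache.hadoop.record.compiler;

import java.util.ArrayList;

// Sketch only: "L" + simple record name + "(" + field signatures + ")".
public class JRecordSignatureSketch {
  public static void main(String[] args) {
    ArrayList<JField<JType>> fields = new ArrayList<JField<JType>>();
    fields.add(new JField<JType>("name", new JString()));
    fields.add(new JField<JType>("age", new JLong()));
    JRecord person = new JRecord("demo.Person", fields);
    System.out.println(person.getSignature());  // prints "LPerson(sl)"
  }
}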


@@ -0,0 +1,96 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JString extends JCompType {
class JavaString extends JavaCompType {
JavaString() {
super("String", "String", "String", "TypeID.RIOType.STRING");
}
@Override
String getTypeIDObjectString() {
return "org.apache.hadoop.record.meta.TypeID.StringTypeID";
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
cb.append(s+"+=(z+i); "+l+"-= (z+i);\n");
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);\n");
cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
cb.append("s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
cb.append("}\n");
}
@Override
void genClone(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
}
}
class CppString extends CppCompType {
CppString() {
super("::std::string");
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)";
}
}
/** Creates a new instance of JString */
public JString() {
setJavaType(new JavaString());
setCppType(new CppString());
setCType(new CCompType());
}
@Override
String getSignature() {
return "s";
}
}


@@ -0,0 +1,230 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Abstract Base class for all types supported by Hadoop Record I/O.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
abstract public class JType {
static String toCamelCase(String name) {
char firstChar = name.charAt(0);
if (Character.isLowerCase(firstChar)) {
return ""+Character.toUpperCase(firstChar) + name.substring(1);
}
return name;
}
JavaType javaType;
CppType cppType;
CType cType;
abstract class JavaType {
private String name;
private String methodSuffix;
private String wrapper;
private String typeIDByteString; // points to TypeID.RIOType
JavaType(String javaname,
String suffix,
String wrapper,
String typeIDByteString) {
this.name = javaname;
this.methodSuffix = suffix;
this.wrapper = wrapper;
this.typeIDByteString = typeIDByteString;
}
void genDecl(CodeBuffer cb, String fname) {
cb.append("private "+name+" "+fname+";\n");
}
void genStaticTypeInfo(CodeBuffer cb, String fname) {
cb.append(Consts.RTI_VAR + ".addField(\"" + fname + "\", " +
getTypeIDObjectString() + ");\n");
}
abstract String getTypeIDObjectString();
void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
// do nothing by default
return;
}
/*void genRtiFieldCondition(CodeBuffer cb, String fname, int ct) {
cb.append("if ((tInfo.fieldID.equals(\"" + fname + "\")) && (typeVal ==" +
" org.apache.hadoop.record.meta." + getTypeIDByteString() + ")) {\n");
cb.append("rtiFilterFields[i] = " + ct + ";\n");
cb.append("}\n");
}
void genRtiNestedFieldCondition(CodeBuffer cb, String varName, int ct) {
cb.append("if (" + varName + ".getElementTypeID().getTypeVal() == " +
"org.apache.hadoop.record.meta." + getTypeIDByteString() +
") {\n");
cb.append("rtiFilterFields[i] = " + ct + ";\n");
cb.append("}\n");
}*/
void genConstructorParam(CodeBuffer cb, String fname) {
cb.append("final "+name+" "+fname);
}
void genGetSet(CodeBuffer cb, String fname) {
cb.append("public "+name+" get"+toCamelCase(fname)+"() {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
cb.append("public void set"+toCamelCase(fname)+"(final "+name+" "+fname+") {\n");
cb.append("this."+fname+"="+fname+";\n");
cb.append("}\n");
}
String getType() {
return name;
}
String getWrapperType() {
return wrapper;
}
String getMethodSuffix() {
return methodSuffix;
}
String getTypeIDByteString() {
return typeIDByteString;
}
void genWriteMethod(CodeBuffer cb, String fname, String tag) {
cb.append(Consts.RECORD_OUTPUT + ".write"+methodSuffix +
"("+fname+",\""+tag+"\");\n");
}
void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
if (decl) {
cb.append(name+" "+fname+";\n");
}
cb.append(fname+"=" + Consts.RECORD_INPUT + ".read" +
methodSuffix+"(\""+tag+"\");\n");
}
void genCompareTo(CodeBuffer cb, String fname, String other) {
cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 :(("+
fname+"<"+other+")?-1:1);\n");
}
abstract void genCompareBytes(CodeBuffer cb);
abstract void genSlurpBytes(CodeBuffer cb, String b, String s, String l);
void genEquals(CodeBuffer cb, String fname, String peer) {
cb.append(Consts.RIO_PREFIX + "ret = ("+fname+"=="+peer+");\n");
}
void genHashCode(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "ret = (int)"+fname+";\n");
}
void genConstructorSet(CodeBuffer cb, String fname) {
cb.append("this."+fname+" = "+fname+";\n");
}
void genClone(CodeBuffer cb, String fname) {
cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
}
}
abstract class CppType {
private String name;
CppType(String cppname) {
name = cppname;
}
void genDecl(CodeBuffer cb, String fname) {
cb.append(name+" "+fname+";\n");
}
void genStaticTypeInfo(CodeBuffer cb, String fname) {
cb.append("p->addField(new ::std::string(\"" +
fname + "\"), " + getTypeIDObjectString() + ");\n");
}
void genGetSet(CodeBuffer cb, String fname) {
cb.append("virtual "+name+" get"+toCamelCase(fname)+"() const {\n");
cb.append("return "+fname+";\n");
cb.append("}\n");
cb.append("virtual void set"+toCamelCase(fname)+"("+name+" m_) {\n");
cb.append(fname+"=m_;\n");
cb.append("}\n");
}
abstract String getTypeIDObjectString();
void genSetRTIFilter(CodeBuffer cb) {
// do nothing by default
return;
}
String getType() {
return name;
}
}
class CType {
}
abstract String getSignature();
void setJavaType(JavaType jType) {
this.javaType = jType;
}
JavaType getJavaType() {
return javaType;
}
void setCppType(CppType cppType) {
this.cppType = cppType;
}
CppType getCppType() {
return cppType;
}
void setCType(CType cType) {
this.cType = cType;
}
CType getCType() {
return cType;
}
}
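For illustration, the code that genDecl and genGetSet above emit for a
hypothetical String field named "name" has this shape (formatted for
readability):

// Sketch only: the accessor pair generated for a field "name", with
// toCamelCase("name") producing the "Name" in the method names.
public class GeneratedAccessorShape {
  private String name;
  public String getName() {
    return name;
  }
  public void setName(final String name) {
    this.name = name;
  }
}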


@@ -0,0 +1,214 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JVector extends JCompType {
static private int level = 0;
static private String getId(String id) { return id+getLevel(); }
static private String getLevel() { return Integer.toString(level); }
static private void incrLevel() { level++; }
static private void decrLevel() { level--; }
private JType type;
class JavaVector extends JavaCompType {
private JType.JavaType element;
JavaVector(JType.JavaType t) {
super("java.util.ArrayList<"+t.getWrapperType()+">",
"Vector", "java.util.ArrayList<"+t.getWrapperType()+">",
"TypeID.RIOType.VECTOR");
element = t;
}
@Override
String getTypeIDObjectString() {
return "new org.apache.hadoop.record.meta.VectorTypeID(" +
element.getTypeIDObjectString() + ")";
}
@Override
void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
element.genSetRTIFilter(cb, nestedStructMap);
}
@Override
void genCompareTo(CodeBuffer cb, String fname, String other) {
cb.append("{\n");
incrLevel();
cb.append("int "+getId(Consts.RIO_PREFIX + "len1")+" = "+fname+
".size();\n");
cb.append("int "+getId(Consts.RIO_PREFIX + "len2")+" = "+other+
".size();\n");
cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; "+
getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len1")+
" && "+getId(Consts.RIO_PREFIX + "vidx")+"<"+
getId(Consts.RIO_PREFIX + "len2")+"; "+
getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e1")+
" = "+fname+
".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e2")+
" = "+other+
".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
element.genCompareTo(cb, getId(Consts.RIO_PREFIX + "e1"),
getId(Consts.RIO_PREFIX + "e2"));
cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
Consts.RIO_PREFIX + "ret; }\n");
cb.append("}\n");
cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "len1")+
" - "+getId(Consts.RIO_PREFIX + "len2")+");\n");
decrLevel();
cb.append("}\n");
}
@Override
void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
if (decl) {
cb.append(getType()+" "+fname+";\n");
}
cb.append("{\n");
incrLevel();
cb.append("org.apache.hadoop.record.Index "+
getId(Consts.RIO_PREFIX + "vidx")+" = " +
Consts.RECORD_INPUT + ".startVector(\""+tag+"\");\n");
cb.append(fname+"=new "+getType()+"();\n");
cb.append("for (; !"+getId(Consts.RIO_PREFIX + "vidx")+".done(); " +
getId(Consts.RIO_PREFIX + "vidx")+".incr()) {\n");
element.genReadMethod(cb, getId(Consts.RIO_PREFIX + "e"),
getId(Consts.RIO_PREFIX + "e"), true);
cb.append(fname+".add("+getId(Consts.RIO_PREFIX + "e")+");\n");
cb.append("}\n");
cb.append(Consts.RECORD_INPUT + ".endVector(\""+tag+"\");\n");
decrLevel();
cb.append("}\n");
}
@Override
void genWriteMethod(CodeBuffer cb, String fname, String tag) {
cb.append("{\n");
incrLevel();
cb.append(Consts.RECORD_OUTPUT + ".startVector("+fname+",\""+tag+"\");\n");
cb.append("int "+getId(Consts.RIO_PREFIX + "len")+" = "+fname+".size();\n");
cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; " +
getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len")+
"; "+getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e")+" = "+
fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
element.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "e"),
getId(Consts.RIO_PREFIX + "e"));
cb.append("}\n");
cb.append(Consts.RECORD_OUTPUT + ".endVector("+fname+",\""+tag+"\");\n");
cb.append("}\n");
decrLevel();
}
@Override
void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
cb.append("{\n");
incrLevel();
cb.append("int "+getId("vi")+
" = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
cb.append("int "+getId("vz")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi")+");\n");
cb.append(s+"+="+getId("vz")+"; "+l+"-="+getId("vz")+";\n");
cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
" < "+getId("vi")+"; "+getId("vidx")+"++)");
element.genSlurpBytes(cb, b, s, l);
decrLevel();
cb.append("}\n");
}
@Override
void genCompareBytes(CodeBuffer cb) {
cb.append("{\n");
incrLevel();
cb.append("int "+getId("vi1")+
" = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
cb.append("int "+getId("vi2")+
" = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
cb.append("int "+getId("vz1")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi1")+");\n");
cb.append("int "+getId("vz2")+
" = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi2")+");\n");
cb.append("s1+="+getId("vz1")+"; s2+="+getId("vz2")+
"; l1-="+getId("vz1")+"; l2-="+getId("vz2")+";\n");
cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
" < "+getId("vi1")+" && "+getId("vidx")+" < "+getId("vi2")+
"; "+getId("vidx")+"++)");
element.genCompareBytes(cb);
cb.append("if ("+getId("vi1")+" != "+getId("vi2")+
") { return ("+getId("vi1")+"<"+getId("vi2")+")?-1:0; }\n");
decrLevel();
cb.append("}\n");
}
}
class CppVector extends CppCompType {
private JType.CppType element;
CppVector(JType.CppType t) {
super("::std::vector< "+t.getType()+" >");
element = t;
}
@Override
String getTypeIDObjectString() {
return "new ::hadoop::VectorTypeID(" +
element.getTypeIDObjectString() + ")";
}
@Override
void genSetRTIFilter(CodeBuffer cb) {
element.genSetRTIFilter(cb);
}
}
/** Creates a new instance of JVector */
public JVector(JType t) {
type = t;
setJavaType(new JavaVector(t.getJavaType()));
setCppType(new CppVector(t.getCppType()));
setCType(new CCompType());
}
@Override
String getSignature() {
return "[" + type.getSignature() + "]";
}
}
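Composite signatures nest, so a vector of the map type shown earlier combines
both forms; the same package-private caveat applies to this hypothetical sketch.

package org.apache.hadoop.record.compiler;

// Sketch only: "[" wraps the element signature, which here is itself "{sl}".
public class JVectorSignatureSketch {
  public static void main(String[] args) {
    JVector v = new JVector(new JMap(new JString(), new JLong()));
    System.out.println(v.getSignature());  // prints "[{sl}]"
  }
}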


@@ -0,0 +1,51 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler;
import java.util.ArrayList;
import java.io.IOException;
import java.util.Iterator;
/**
* Java Code generator front-end for Hadoop record I/O.
*/
class JavaGenerator extends CodeGenerator {
JavaGenerator() {
}
/**
* Generate Java code for records. This method is only a front-end to
* JRecord, since one file is generated for each record.
*
* @param name possibly full pathname to the file
* @param ilist included files (as JFile)
* @param rlist List of records defined within this file
* @param destDir output directory
*/
@Override
void genCode(String name, ArrayList<JFile> ilist,
ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
throws IOException {
for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
JRecord rec = iter.next();
rec.genJavaCode(destDir, options);
}
}
}


@@ -0,0 +1,145 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.ant;
import java.io.File;
import java.util.ArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.compiler.generated.Rcc;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;
/**
* Hadoop record compiler ant Task
*<p> This task takes the given record definition files and compiles them into
* java or c++
* files. It is then up to the user to compile the generated files.
*
* <p> The task requires the <code>file</code> or the nested fileset element to be
* specified. Optional attributes are <code>language</code> (set the output
* language, default is "java"),
* <code>destdir</code> (name of the destination directory for generated java/c++
* code, default is ".") and <code>failonerror</code> (specifies error handling
* behavior. default is true).
* <p><h4>Usage</h4>
* <pre>
* &lt;recordcc
* destdir="${basedir}/gensrc"
* language="java"&gt;
* &lt;fileset include="**\/*.jr" /&gt;
* &lt;/recordcc&gt;
* </pre>
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class RccTask extends Task {
private String language = "java";
private File src;
private File dest = new File(".");
private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
private boolean failOnError = true;
/** Creates a new instance of RccTask */
public RccTask() {
}
/**
* Sets the output language option
* @param language "java"/"c++"
*/
public void setLanguage(String language) {
this.language = language;
}
/**
* Sets the record definition file attribute
* @param file record definition file
*/
public void setFile(File file) {
this.src = file;
}
/**
* Given multiple files (via fileset), set the error handling behavior
* @param flag true will throw build exception in case of failure (default)
*/
public void setFailonerror(boolean flag) {
this.failOnError = flag;
}
/**
* Sets directory where output files will be generated
* @param dir output directory
*/
public void setDestdir(File dir) {
this.dest = dir;
}
/**
* Adds a fileset that can consist of one or more files
* @param set Set of record definition files
*/
public void addFileset(FileSet set) {
filesets.add(set);
}
/**
* Invoke the Hadoop record compiler on each record definition file
*/
@Override
public void execute() throws BuildException {
if (src == null && filesets.size()==0) {
throw new BuildException("There must be a file attribute or a fileset child element");
}
if (src != null) {
doCompile(src);
}
Project myProject = getProject();
for (int i = 0; i < filesets.size(); i++) {
FileSet fs = filesets.get(i);
DirectoryScanner ds = fs.getDirectoryScanner(myProject);
File dir = fs.getDir(myProject);
String[] srcs = ds.getIncludedFiles();
for (int j = 0; j < srcs.length; j++) {
doCompile(new File(dir, srcs[j]));
}
}
}
private void doCompile(File file) throws BuildException {
String[] args = new String[5];
args[0] = "--language";
args[1] = this.language;
args[2] = "--destdir";
args[3] = this.dest.getPath();
args[4] = file.getPath();
int retVal = Rcc.driver(args);
if (retVal != 0 && failOnError) {
throw new BuildException("Hadoop record compiler returned error code "+retVal);
}
}
}
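Outside of ant, the compiler can be driven directly with the same argument
vector doCompile assembles above; a minimal sketch, where the input file and
destination directory are hypothetical.

import org.apache.hadoop.record.compiler.generated.Rcc;

// Sketch only: mirrors the args array built in RccTask.doCompile.
public class RccDriverSketch {
  public static void main(String[] args) {
    String[] rccArgs = {
        "--language", "java",
        "--destdir", "gensrc",     // hypothetical output directory
        "mytypes.jr"               // hypothetical record definition file
    };
    int retVal = Rcc.driver(rccArgs);
    if (retVal != 0) {
      System.err.println("Hadoop record compiler returned error code " + retVal);
    }
  }
}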


@@ -0,0 +1,219 @@
/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* This exception is thrown when parse errors are encountered.
* You can explicitly create objects of this exception type by
* calling the method generateParseException in the generated
* parser.
*
* You can modify this class to customize your error reporting
* mechanisms so long as you retain the public fields.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ParseException extends Exception {
/**
* This constructor is used by the method "generateParseException"
* in the generated parser. Calling this constructor generates
* a new object of this type with the fields "currentToken",
* "expectedTokenSequences", and "tokenImage" set. The boolean
* flag "specialConstructor" is also set to true to indicate that
* this constructor was used to create this object.
* This constructor calls its super class with the empty string
* to force the "toString" method of parent class "Throwable" to
* print the error message in the form:
* ParseException: <result of getMessage>
*/
public ParseException(Token currentTokenVal,
int[][] expectedTokenSequencesVal,
String[] tokenImageVal
)
{
super("");
specialConstructor = true;
currentToken = currentTokenVal;
expectedTokenSequences = expectedTokenSequencesVal;
tokenImage = tokenImageVal;
}
/**
* The following constructors are for use by you for whatever
* purpose you can think of. Constructing the exception in this
* manner makes the exception behave in the normal way - i.e., as
* documented in the class "Throwable". The fields "errorToken",
* "expectedTokenSequences", and "tokenImage" do not contain
* relevant information. The JavaCC generated code does not use
* these constructors.
*/
public ParseException() {
super();
specialConstructor = false;
}
public ParseException(String message) {
super(message);
specialConstructor = false;
}
/**
* This variable determines which constructor was used to create
* this object and thereby affects the semantics of the
* "getMessage" method (see below).
*/
protected boolean specialConstructor;
/**
* This is the last token that has been consumed successfully. If
* this object has been created due to a parse error, the token
 * following this token will (therefore) be the first error token.
*/
public Token currentToken;
/**
* Each entry in this array is an array of integers. Each array
* of integers represents a sequence of tokens (by their ordinal
* values) that is expected at this point of the parse.
*/
public int[][] expectedTokenSequences;
/**
* This is a reference to the "tokenImage" array of the generated
* parser within which the parse error occurred. This array is
* defined in the generated ...Constants interface.
*/
public String[] tokenImage;
/**
* This method has the standard behavior when this object has been
* created using the standard constructors. Otherwise, it uses
* "currentToken" and "expectedTokenSequences" to generate a parse
* error message and returns it. If this object has been created
* due to a parse error, and you do not catch it (it gets thrown
* from the parser), then this method is called during the printing
* of the final stack trace, and hence the correct error message
* gets displayed.
*/
@Override
public String getMessage() {
if (!specialConstructor) {
return super.getMessage();
}
StringBuffer expected = new StringBuffer();
int maxSize = 0;
for (int i = 0; i < expectedTokenSequences.length; i++) {
if (maxSize < expectedTokenSequences[i].length) {
maxSize = expectedTokenSequences[i].length;
}
for (int j = 0; j < expectedTokenSequences[i].length; j++) {
expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
}
if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
expected.append("...");
}
expected.append(eol).append(" ");
}
String retval = "Encountered \"";
Token tok = currentToken.next;
for (int i = 0; i < maxSize; i++) {
if (i != 0) retval += " ";
if (tok.kind == 0) {
retval += tokenImage[0];
break;
}
retval += add_escapes(tok.image);
tok = tok.next;
}
retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
retval += "." + eol;
if (expectedTokenSequences.length == 1) {
retval += "Was expecting:" + eol + " ";
} else {
retval += "Was expecting one of:" + eol + " ";
}
retval += expected.toString();
return retval;
}
/**
* The end of line string for this machine.
*/
protected String eol = System.getProperty("line.separator", "\n");
/**
 * Used to convert raw characters to their escaped versions
 * when these raw versions cannot be used as part of an ASCII
* string literal.
*/
protected String add_escapes(String str) {
StringBuffer retval = new StringBuffer();
char ch;
for (int i = 0; i < str.length(); i++) {
switch (str.charAt(i))
{
case 0 :
continue;
case '\b':
retval.append("\\b");
continue;
case '\t':
retval.append("\\t");
continue;
case '\n':
retval.append("\\n");
continue;
case '\f':
retval.append("\\f");
continue;
case '\r':
retval.append("\\r");
continue;
case '\"':
retval.append("\\\"");
continue;
case '\'':
retval.append("\\\'");
continue;
case '\\':
retval.append("\\\\");
continue;
default:
if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
String s = "0000" + Integer.toString(ch, 16);
retval.append("\\u" + s.substring(s.length() - 4, s.length()));
} else {
retval.append(ch);
}
continue;
}
}
return retval.toString();
}
}
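The class is easiest to understand from the caller's side: the generated Rcc parser throws ParseException out of Input(), and getMessage() assembles the report described above from the token tables. A minimal sketch, using a deliberately broken, made-up DDL string:

import java.io.StringReader;
import org.apache.hadoop.record.compiler.generated.ParseException;
import org.apache.hadoop.record.compiler.generated.Rcc;

public class ParseExceptionDemo {
  public static void main(String[] args) {
    // Invalid on purpose: a record appears outside of a module block.
    Rcc parser = new Rcc(new StringReader("class Broken { int x; }"));
    try {
      parser.Input();
    } catch (ParseException e) {
      // Thrown via the special constructor, so getMessage() builds the
      // "Encountered ... Was expecting ..." report from the token tables.
      System.err.println(e.getMessage());
    }
  }
}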


@@ -0,0 +1,542 @@
/* Generated By:JavaCC: Do not edit this line. Rcc.java */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.compiler.*;
import java.util.ArrayList;
import java.util.Hashtable;
import java.io.File;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Rcc implements RccConstants {
private static String language = "java";
private static String destDir = ".";
private static ArrayList<String> recFiles = new ArrayList<String>();
private static ArrayList<String> cmdargs = new ArrayList<String>();
private static JFile curFile;
private static Hashtable<String,JRecord> recTab;
private static String curDir = ".";
private static String curFileName;
private static String curModuleName;
public static void main(String[] args) {
System.exit(driver(args));
}
public static void usage() {
System.err.println("Usage: rcc --language [java|c++] ddl-files");
}
public static int driver(String[] args) {
for (int i=0; i<args.length; i++) {
if ("-l".equalsIgnoreCase(args[i]) ||
"--language".equalsIgnoreCase(args[i])) {
language = args[i+1].toLowerCase();
i++;
} else if ("-d".equalsIgnoreCase(args[i]) ||
"--destdir".equalsIgnoreCase(args[i])) {
destDir = args[i+1];
i++;
} else if (args[i].startsWith("-")) {
String arg = args[i].substring(1);
if (arg.startsWith("-")) {
arg = arg.substring(1);
}
cmdargs.add(arg.toLowerCase());
} else {
recFiles.add(args[i]);
}
}
if (recFiles.size() == 0) {
usage();
return 1;
}
for (int i=0; i<recFiles.size(); i++) {
curFileName = recFiles.get(i);
File file = new File(curFileName);
try {
FileReader reader = new FileReader(file);
Rcc parser = new Rcc(reader);
try {
recTab = new Hashtable<String,JRecord>();
curFile = parser.Input();
} catch (ParseException e) {
System.err.println(e.toString());
return 1;
}
try {
reader.close();
} catch (IOException e) {
}
} catch (FileNotFoundException e) {
System.err.println("File " + recFiles.get(i) +
" Not found.");
return 1;
}
try {
int retCode = curFile.genCode(language, destDir, cmdargs);
if (retCode != 0) { return retCode; }
} catch (IOException e) {
System.err.println(e.toString());
return 1;
}
}
return 0;
}
final public JFile Input() throws ParseException {
ArrayList<JFile> ilist = new ArrayList<JFile>();
ArrayList<JRecord> rlist = new ArrayList<JRecord>();
JFile i;
ArrayList<JRecord> l;
label_1:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case INCLUDE_TKN:
i = Include();
ilist.add(i);
break;
case MODULE_TKN:
l = Module();
rlist.addAll(l);
break;
default:
jj_la1[0] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case MODULE_TKN:
case INCLUDE_TKN:
;
break;
default:
jj_la1[1] = jj_gen;
break label_1;
}
}
jj_consume_token(0);
{if (true) return new JFile(curFileName, ilist, rlist);}
throw new Error("Missing return statement in function");
}
final public JFile Include() throws ParseException {
String fname;
Token t;
jj_consume_token(INCLUDE_TKN);
t = jj_consume_token(CSTRING_TKN);
JFile ret = null;
fname = t.image.replaceAll("^\"", "").replaceAll("\"$","");
File file = new File(curDir, fname);
String tmpDir = curDir;
String tmpFile = curFileName;
curDir = file.getParent();
curFileName = file.getName();
try {
FileReader reader = new FileReader(file);
Rcc parser = new Rcc(reader);
try {
ret = parser.Input();
System.out.println(fname + " Parsed Successfully");
} catch (ParseException e) {
System.out.println(e.toString());
System.exit(1);
}
try {
reader.close();
} catch (IOException e) {
}
} catch (FileNotFoundException e) {
System.out.println("File " + fname +
" Not found.");
System.exit(1);
}
curDir = tmpDir;
curFileName = tmpFile;
{if (true) return ret;}
throw new Error("Missing return statement in function");
}
final public ArrayList<JRecord> Module() throws ParseException {
String mName;
ArrayList<JRecord> rlist;
jj_consume_token(MODULE_TKN);
mName = ModuleName();
curModuleName = mName;
jj_consume_token(LBRACE_TKN);
rlist = RecordList();
jj_consume_token(RBRACE_TKN);
{if (true) return rlist;}
throw new Error("Missing return statement in function");
}
final public String ModuleName() throws ParseException {
String name = "";
Token t;
t = jj_consume_token(IDENT_TKN);
name += t.image;
label_2:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case DOT_TKN:
;
break;
default:
jj_la1[2] = jj_gen;
break label_2;
}
jj_consume_token(DOT_TKN);
t = jj_consume_token(IDENT_TKN);
name += "." + t.image;
}
{if (true) return name;}
throw new Error("Missing return statement in function");
}
final public ArrayList<JRecord> RecordList() throws ParseException {
ArrayList<JRecord> rlist = new ArrayList<JRecord>();
JRecord r;
label_3:
while (true) {
r = Record();
rlist.add(r);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case RECORD_TKN:
;
break;
default:
jj_la1[3] = jj_gen;
break label_3;
}
}
{if (true) return rlist;}
throw new Error("Missing return statement in function");
}
final public JRecord Record() throws ParseException {
String rname;
ArrayList<JField<JType>> flist = new ArrayList<JField<JType>>();
Token t;
JField<JType> f;
jj_consume_token(RECORD_TKN);
t = jj_consume_token(IDENT_TKN);
rname = t.image;
jj_consume_token(LBRACE_TKN);
label_4:
while (true) {
f = Field();
flist.add(f);
jj_consume_token(SEMICOLON_TKN);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case BYTE_TKN:
case BOOLEAN_TKN:
case INT_TKN:
case LONG_TKN:
case FLOAT_TKN:
case DOUBLE_TKN:
case USTRING_TKN:
case BUFFER_TKN:
case VECTOR_TKN:
case MAP_TKN:
case IDENT_TKN:
;
break;
default:
jj_la1[4] = jj_gen;
break label_4;
}
}
jj_consume_token(RBRACE_TKN);
String fqn = curModuleName + "." + rname;
JRecord r = new JRecord(fqn, flist);
recTab.put(fqn, r);
{if (true) return r;}
throw new Error("Missing return statement in function");
}
final public JField<JType> Field() throws ParseException {
JType jt;
Token t;
jt = Type();
t = jj_consume_token(IDENT_TKN);
{if (true) return new JField<JType>(t.image, jt);}
throw new Error("Missing return statement in function");
}
final public JType Type() throws ParseException {
JType jt;
Token t;
String rname;
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case MAP_TKN:
jt = Map();
{if (true) return jt;}
break;
case VECTOR_TKN:
jt = Vector();
{if (true) return jt;}
break;
case BYTE_TKN:
jj_consume_token(BYTE_TKN);
{if (true) return new JByte();}
break;
case BOOLEAN_TKN:
jj_consume_token(BOOLEAN_TKN);
{if (true) return new JBoolean();}
break;
case INT_TKN:
jj_consume_token(INT_TKN);
{if (true) return new JInt();}
break;
case LONG_TKN:
jj_consume_token(LONG_TKN);
{if (true) return new JLong();}
break;
case FLOAT_TKN:
jj_consume_token(FLOAT_TKN);
{if (true) return new JFloat();}
break;
case DOUBLE_TKN:
jj_consume_token(DOUBLE_TKN);
{if (true) return new JDouble();}
break;
case USTRING_TKN:
jj_consume_token(USTRING_TKN);
{if (true) return new JString();}
break;
case BUFFER_TKN:
jj_consume_token(BUFFER_TKN);
{if (true) return new JBuffer();}
break;
case IDENT_TKN:
rname = ModuleName();
if (rname.indexOf('.', 0) < 0) {
rname = curModuleName + "." + rname;
}
JRecord r = recTab.get(rname);
if (r == null) {
System.out.println("Type " + rname + " not known. Exiting.");
System.exit(1);
}
{if (true) return r;}
break;
default:
jj_la1[5] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
throw new Error("Missing return statement in function");
}
final public JMap Map() throws ParseException {
JType jt1;
JType jt2;
jj_consume_token(MAP_TKN);
jj_consume_token(LT_TKN);
jt1 = Type();
jj_consume_token(COMMA_TKN);
jt2 = Type();
jj_consume_token(GT_TKN);
{if (true) return new JMap(jt1, jt2);}
throw new Error("Missing return statement in function");
}
final public JVector Vector() throws ParseException {
JType jt;
jj_consume_token(VECTOR_TKN);
jj_consume_token(LT_TKN);
jt = Type();
jj_consume_token(GT_TKN);
{if (true) return new JVector(jt);}
throw new Error("Missing return statement in function");
}
public RccTokenManager token_source;
SimpleCharStream jj_input_stream;
public Token token, jj_nt;
private int jj_ntk;
private int jj_gen;
final private int[] jj_la1 = new int[6];
static private int[] jj_la1_0;
static private int[] jj_la1_1;
static {
jj_la1_0();
jj_la1_1();
}
private static void jj_la1_0() {
jj_la1_0 = new int[] {0x2800, 0x2800, 0x40000000, 0x1000, 0xffc000, 0xffc000,};
}
private static void jj_la1_1() {
jj_la1_1 = new int[] {0x0, 0x0, 0x0, 0x0, 0x1, 0x1,};
}
public Rcc(java.io.InputStream stream) {
this(stream, null);
}
public Rcc(java.io.InputStream stream, String encoding) {
try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
token_source = new RccTokenManager(jj_input_stream);
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
public void ReInit(java.io.InputStream stream) {
ReInit(stream, null);
}
public void ReInit(java.io.InputStream stream, String encoding) {
try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
token_source.ReInit(jj_input_stream);
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
public Rcc(java.io.Reader stream) {
jj_input_stream = new SimpleCharStream(stream, 1, 1);
token_source = new RccTokenManager(jj_input_stream);
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
public void ReInit(java.io.Reader stream) {
jj_input_stream.ReInit(stream, 1, 1);
token_source.ReInit(jj_input_stream);
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
public Rcc(RccTokenManager tm) {
token_source = tm;
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
public void ReInit(RccTokenManager tm) {
token_source = tm;
token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 6; i++) jj_la1[i] = -1;
}
final private Token jj_consume_token(int kind) throws ParseException {
Token oldToken;
if ((oldToken = token).next != null) token = token.next;
else token = token.next = token_source.getNextToken();
jj_ntk = -1;
if (token.kind == kind) {
jj_gen++;
return token;
}
token = oldToken;
jj_kind = kind;
throw generateParseException();
}
final public Token getNextToken() {
if (token.next != null) token = token.next;
else token = token.next = token_source.getNextToken();
jj_ntk = -1;
jj_gen++;
return token;
}
final public Token getToken(int index) {
Token t = token;
for (int i = 0; i < index; i++) {
if (t.next != null) t = t.next;
else t = t.next = token_source.getNextToken();
}
return t;
}
final private int jj_ntk() {
if ((jj_nt=token.next) == null)
return (jj_ntk = (token.next=token_source.getNextToken()).kind);
else
return (jj_ntk = jj_nt.kind);
}
private java.util.Vector<int[]> jj_expentries = new java.util.Vector<int[]>();
private int[] jj_expentry;
private int jj_kind = -1;
public ParseException generateParseException() {
jj_expentries.removeAllElements();
boolean[] la1tokens = new boolean[33];
for (int i = 0; i < 33; i++) {
la1tokens[i] = false;
}
if (jj_kind >= 0) {
la1tokens[jj_kind] = true;
jj_kind = -1;
}
for (int i = 0; i < 6; i++) {
if (jj_la1[i] == jj_gen) {
for (int j = 0; j < 32; j++) {
if ((jj_la1_0[i] & (1<<j)) != 0) {
la1tokens[j] = true;
}
if ((jj_la1_1[i] & (1<<j)) != 0) {
la1tokens[32+j] = true;
}
}
}
}
for (int i = 0; i < 33; i++) {
if (la1tokens[i]) {
jj_expentry = new int[1];
jj_expentry[0] = i;
jj_expentries.addElement(jj_expentry);
}
}
int[][] exptokseq = new int[jj_expentries.size()][];
for (int i = 0; i < jj_expentries.size(); i++) {
exptokseq[i] = jj_expentries.elementAt(i);
}
return new ParseException(token, exptokseq, tokenImage);
}
final public void enable_tracing() {
}
final public void disable_tracing() {
}
}
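driver() is also the programmatic entry point used by RccTask. A minimal sketch of a direct call; the types.jr file name and gensrc directory are assumptions for illustration:

public class RccDriverDemo {
  public static void main(String[] args) {
    String[] rccArgs = {
      "--language", "java",  // or "c++"
      "--destdir", "gensrc", // directory for generated sources
      "types.jr"             // one or more record DDL files
    };
    int rc = org.apache.hadoop.record.compiler.generated.Rcc.driver(rccArgs);
    if (rc != 0) {
      System.err.println("record compilation failed with code " + rc);
    }
  }
}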


@@ -0,0 +1,97 @@
/* Generated By:JavaCC: Do not edit this line. RccConstants.java */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface RccConstants {
int EOF = 0;
int MODULE_TKN = 11;
int RECORD_TKN = 12;
int INCLUDE_TKN = 13;
int BYTE_TKN = 14;
int BOOLEAN_TKN = 15;
int INT_TKN = 16;
int LONG_TKN = 17;
int FLOAT_TKN = 18;
int DOUBLE_TKN = 19;
int USTRING_TKN = 20;
int BUFFER_TKN = 21;
int VECTOR_TKN = 22;
int MAP_TKN = 23;
int LBRACE_TKN = 24;
int RBRACE_TKN = 25;
int LT_TKN = 26;
int GT_TKN = 27;
int SEMICOLON_TKN = 28;
int COMMA_TKN = 29;
int DOT_TKN = 30;
int CSTRING_TKN = 31;
int IDENT_TKN = 32;
int DEFAULT = 0;
int WithinOneLineComment = 1;
int WithinMultiLineComment = 2;
String[] tokenImage = {
"<EOF>",
"\" \"",
"\"\\t\"",
"\"\\n\"",
"\"\\r\"",
"\"//\"",
"<token of kind 6>",
"<token of kind 7>",
"\"/*\"",
"\"*/\"",
"<token of kind 10>",
"\"module\"",
"\"class\"",
"\"include\"",
"\"byte\"",
"\"boolean\"",
"\"int\"",
"\"long\"",
"\"float\"",
"\"double\"",
"\"ustring\"",
"\"buffer\"",
"\"vector\"",
"\"map\"",
"\"{\"",
"\"}\"",
"\"<\"",
"\">\"",
"\";\"",
"\",\"",
"\".\"",
"<CSTRING_TKN>",
"<IDENT_TKN>",
};
}
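The tokenImage table is indexed by the *_TKN ordinals above; ParseException.getMessage() relies on exactly this mapping when it prints expected tokens. A small sketch, assuming it sits in (or imports from) org.apache.hadoop.record.compiler.generated:

public class RccConstantsDemo {
  public static void main(String[] args) {
    // Fixed keywords carry their quoted literal as the image:
    System.out.println(RccConstants.tokenImage[RccConstants.MODULE_TKN]); // "module"
    // Kinds without a fixed literal use a descriptive placeholder:
    System.out.println(RccConstants.tokenImage[RccConstants.IDENT_TKN]);  // <IDENT_TKN>
  }
}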


@@ -0,0 +1,833 @@
/* Generated By:JavaCC: Do not edit this line. RccTokenManager.java */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class RccTokenManager implements RccConstants
{
public java.io.PrintStream debugStream = System.out;
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
private final int jjMoveStringLiteralDfa0_1()
{
return jjMoveNfa_1(0, 0);
}
private final void jjCheckNAdd(int state)
{
if (jjrounds[state] != jjround)
{
jjstateSet[jjnewStateCnt++] = state;
jjrounds[state] = jjround;
}
}
private final void jjAddStates(int start, int end)
{
do {
jjstateSet[jjnewStateCnt++] = jjnextStates[start];
} while (start++ != end);
}
private final void jjCheckNAddTwoStates(int state1, int state2)
{
jjCheckNAdd(state1);
jjCheckNAdd(state2);
}
private final void jjCheckNAddStates(int start, int end)
{
do {
jjCheckNAdd(jjnextStates[start]);
} while (start++ != end);
}
private final void jjCheckNAddStates(int start)
{
jjCheckNAdd(jjnextStates[start]);
jjCheckNAdd(jjnextStates[start + 1]);
}
private final int jjMoveNfa_1(int startState, int curPos)
{
int[] nextStates;
int startsAt = 0;
jjnewStateCnt = 3;
int i = 1;
jjstateSet[0] = startState;
int j, kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
MatchLoop: do
{
switch(jjstateSet[--i])
{
case 0:
if ((0x2400L & l) != 0L)
{
if (kind > 6)
kind = 6;
}
if (curChar == 13)
jjstateSet[jjnewStateCnt++] = 1;
break;
case 1:
if (curChar == 10 && kind > 6)
kind = 6;
break;
case 2:
if (curChar == 13)
jjstateSet[jjnewStateCnt++] = 1;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
MatchLoop: do
{
switch(jjstateSet[--i])
{
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
MatchLoop: do
{
switch(jjstateSet[--i])
{
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
private final int jjStopStringLiteralDfa_0(int pos, long active0)
{
switch (pos)
{
case 0:
if ((active0 & 0xfff800L) != 0L)
{
jjmatchedKind = 32;
return 4;
}
return -1;
case 1:
if ((active0 & 0xfff800L) != 0L)
{
jjmatchedKind = 32;
jjmatchedPos = 1;
return 4;
}
return -1;
case 2:
if ((active0 & 0x7ef800L) != 0L)
{
jjmatchedKind = 32;
jjmatchedPos = 2;
return 4;
}
if ((active0 & 0x810000L) != 0L)
return 4;
return -1;
case 3:
if ((active0 & 0x24000L) != 0L)
return 4;
if ((active0 & 0x7cb800L) != 0L)
{
jjmatchedKind = 32;
jjmatchedPos = 3;
return 4;
}
return -1;
case 4:
if ((active0 & 0x41000L) != 0L)
return 4;
if ((active0 & 0x78a800L) != 0L)
{
jjmatchedKind = 32;
jjmatchedPos = 4;
return 4;
}
return -1;
case 5:
if ((active0 & 0x680800L) != 0L)
return 4;
if ((active0 & 0x10a000L) != 0L)
{
jjmatchedKind = 32;
jjmatchedPos = 5;
return 4;
}
return -1;
default :
return -1;
}
}
private final int jjStartNfa_0(int pos, long active0)
{
return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
}
private final int jjStopAtPos(int pos, int kind)
{
jjmatchedKind = kind;
jjmatchedPos = pos;
return pos + 1;
}
private final int jjStartNfaWithStates_0(int pos, int kind, int state)
{
jjmatchedKind = kind;
jjmatchedPos = pos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return pos + 1; }
return jjMoveNfa_0(state, pos + 1);
}
private final int jjMoveStringLiteralDfa0_0()
{
switch(curChar)
{
case 44:
return jjStopAtPos(0, 29);
case 46:
return jjStopAtPos(0, 30);
case 47:
return jjMoveStringLiteralDfa1_0(0x120L);
case 59:
return jjStopAtPos(0, 28);
case 60:
return jjStopAtPos(0, 26);
case 62:
return jjStopAtPos(0, 27);
case 98:
return jjMoveStringLiteralDfa1_0(0x20c000L);
case 99:
return jjMoveStringLiteralDfa1_0(0x1000L);
case 100:
return jjMoveStringLiteralDfa1_0(0x80000L);
case 102:
return jjMoveStringLiteralDfa1_0(0x40000L);
case 105:
return jjMoveStringLiteralDfa1_0(0x12000L);
case 108:
return jjMoveStringLiteralDfa1_0(0x20000L);
case 109:
return jjMoveStringLiteralDfa1_0(0x800800L);
case 117:
return jjMoveStringLiteralDfa1_0(0x100000L);
case 118:
return jjMoveStringLiteralDfa1_0(0x400000L);
case 123:
return jjStopAtPos(0, 24);
case 125:
return jjStopAtPos(0, 25);
default :
return jjMoveNfa_0(0, 0);
}
}
private final int jjMoveStringLiteralDfa1_0(long active0)
{
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(0, active0);
return 1;
}
switch(curChar)
{
case 42:
if ((active0 & 0x100L) != 0L)
return jjStopAtPos(1, 8);
break;
case 47:
if ((active0 & 0x20L) != 0L)
return jjStopAtPos(1, 5);
break;
case 97:
return jjMoveStringLiteralDfa2_0(active0, 0x800000L);
case 101:
return jjMoveStringLiteralDfa2_0(active0, 0x400000L);
case 108:
return jjMoveStringLiteralDfa2_0(active0, 0x41000L);
case 110:
return jjMoveStringLiteralDfa2_0(active0, 0x12000L);
case 111:
return jjMoveStringLiteralDfa2_0(active0, 0xa8800L);
case 115:
return jjMoveStringLiteralDfa2_0(active0, 0x100000L);
case 117:
return jjMoveStringLiteralDfa2_0(active0, 0x200000L);
case 121:
return jjMoveStringLiteralDfa2_0(active0, 0x4000L);
default :
break;
}
return jjStartNfa_0(0, active0);
}
private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjStartNfa_0(0, old0);
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(1, active0);
return 2;
}
switch(curChar)
{
case 97:
return jjMoveStringLiteralDfa3_0(active0, 0x1000L);
case 99:
return jjMoveStringLiteralDfa3_0(active0, 0x402000L);
case 100:
return jjMoveStringLiteralDfa3_0(active0, 0x800L);
case 102:
return jjMoveStringLiteralDfa3_0(active0, 0x200000L);
case 110:
return jjMoveStringLiteralDfa3_0(active0, 0x20000L);
case 111:
return jjMoveStringLiteralDfa3_0(active0, 0x48000L);
case 112:
if ((active0 & 0x800000L) != 0L)
return jjStartNfaWithStates_0(2, 23, 4);
break;
case 116:
if ((active0 & 0x10000L) != 0L)
return jjStartNfaWithStates_0(2, 16, 4);
return jjMoveStringLiteralDfa3_0(active0, 0x104000L);
case 117:
return jjMoveStringLiteralDfa3_0(active0, 0x80000L);
default :
break;
}
return jjStartNfa_0(1, active0);
}
private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjStartNfa_0(1, old0);
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(2, active0);
return 3;
}
switch(curChar)
{
case 97:
return jjMoveStringLiteralDfa4_0(active0, 0x40000L);
case 98:
return jjMoveStringLiteralDfa4_0(active0, 0x80000L);
case 101:
if ((active0 & 0x4000L) != 0L)
return jjStartNfaWithStates_0(3, 14, 4);
break;
case 102:
return jjMoveStringLiteralDfa4_0(active0, 0x200000L);
case 103:
if ((active0 & 0x20000L) != 0L)
return jjStartNfaWithStates_0(3, 17, 4);
break;
case 108:
return jjMoveStringLiteralDfa4_0(active0, 0xa000L);
case 114:
return jjMoveStringLiteralDfa4_0(active0, 0x100000L);
case 115:
return jjMoveStringLiteralDfa4_0(active0, 0x1000L);
case 116:
return jjMoveStringLiteralDfa4_0(active0, 0x400000L);
case 117:
return jjMoveStringLiteralDfa4_0(active0, 0x800L);
default :
break;
}
return jjStartNfa_0(2, active0);
}
private final int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjStartNfa_0(2, old0);
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(3, active0);
return 4;
}
switch(curChar)
{
case 101:
return jjMoveStringLiteralDfa5_0(active0, 0x208000L);
case 105:
return jjMoveStringLiteralDfa5_0(active0, 0x100000L);
case 108:
return jjMoveStringLiteralDfa5_0(active0, 0x80800L);
case 111:
return jjMoveStringLiteralDfa5_0(active0, 0x400000L);
case 115:
if ((active0 & 0x1000L) != 0L)
return jjStartNfaWithStates_0(4, 12, 4);
break;
case 116:
if ((active0 & 0x40000L) != 0L)
return jjStartNfaWithStates_0(4, 18, 4);
break;
case 117:
return jjMoveStringLiteralDfa5_0(active0, 0x2000L);
default :
break;
}
return jjStartNfa_0(3, active0);
}
private final int jjMoveStringLiteralDfa5_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjStartNfa_0(3, old0);
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(4, active0);
return 5;
}
switch(curChar)
{
case 97:
return jjMoveStringLiteralDfa6_0(active0, 0x8000L);
case 100:
return jjMoveStringLiteralDfa6_0(active0, 0x2000L);
case 101:
if ((active0 & 0x800L) != 0L)
return jjStartNfaWithStates_0(5, 11, 4);
else if ((active0 & 0x80000L) != 0L)
return jjStartNfaWithStates_0(5, 19, 4);
break;
case 110:
return jjMoveStringLiteralDfa6_0(active0, 0x100000L);
case 114:
if ((active0 & 0x200000L) != 0L)
return jjStartNfaWithStates_0(5, 21, 4);
else if ((active0 & 0x400000L) != 0L)
return jjStartNfaWithStates_0(5, 22, 4);
break;
default :
break;
}
return jjStartNfa_0(4, active0);
}
private final int jjMoveStringLiteralDfa6_0(long old0, long active0)
{
if (((active0 &= old0)) == 0L)
return jjStartNfa_0(4, old0);
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
jjStopStringLiteralDfa_0(5, active0);
return 6;
}
switch(curChar)
{
case 101:
if ((active0 & 0x2000L) != 0L)
return jjStartNfaWithStates_0(6, 13, 4);
break;
case 103:
if ((active0 & 0x100000L) != 0L)
return jjStartNfaWithStates_0(6, 20, 4);
break;
case 110:
if ((active0 & 0x8000L) != 0L)
return jjStartNfaWithStates_0(6, 15, 4);
break;
default :
break;
}
return jjStartNfa_0(5, active0);
}
static final long[] jjbitVec0 = {
0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
private final int jjMoveNfa_0(int startState, int curPos)
{
int[] nextStates;
int startsAt = 0;
jjnewStateCnt = 5;
int i = 1;
jjstateSet[0] = startState;
int j, kind = 0x7fffffff;
for (;;)
{
if (++jjround == 0x7fffffff)
ReInitRounds();
if (curChar < 64)
{
long l = 1L << curChar;
MatchLoop: do
{
switch(jjstateSet[--i])
{
case 0:
if (curChar == 34)
jjCheckNAdd(1);
break;
case 1:
if ((0xfffffffbffffffffL & l) != 0L)
jjCheckNAddTwoStates(1, 2);
break;
case 2:
if (curChar == 34 && kind > 31)
kind = 31;
break;
case 4:
if ((0x3ff000000000000L & l) == 0L)
break;
if (kind > 32)
kind = 32;
jjstateSet[jjnewStateCnt++] = 4;
break;
default : break;
}
} while(i != startsAt);
}
else if (curChar < 128)
{
long l = 1L << (curChar & 077);
MatchLoop: do
{
switch(jjstateSet[--i])
{
case 0:
if ((0x7fffffe07fffffeL & l) == 0L)
break;
if (kind > 32)
kind = 32;
jjCheckNAdd(4);
break;
case 1:
jjAddStates(0, 1);
break;
case 4:
if ((0x7fffffe87fffffeL & l) == 0L)
break;
if (kind > 32)
kind = 32;
jjCheckNAdd(4);
break;
default : break;
}
} while(i != startsAt);
}
else
{
int i2 = (curChar & 0xff) >> 6;
long l2 = 1L << (curChar & 077);
MatchLoop: do
{
switch(jjstateSet[--i])
{
case 1:
if ((jjbitVec0[i2] & l2) != 0L)
jjAddStates(0, 1);
break;
default : break;
}
} while(i != startsAt);
}
if (kind != 0x7fffffff)
{
jjmatchedKind = kind;
jjmatchedPos = curPos;
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 5 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
}
}
private final int jjMoveStringLiteralDfa0_2()
{
switch(curChar)
{
case 42:
return jjMoveStringLiteralDfa1_2(0x200L);
default :
return 1;
}
}
private final int jjMoveStringLiteralDfa1_2(long active0)
{
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) {
return 1;
}
switch(curChar)
{
case 47:
if ((active0 & 0x200L) != 0L)
return jjStopAtPos(1, 9);
break;
default :
return 2;
}
return 2;
}
static final int[] jjnextStates = {
1, 2,
};
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, null, null, null, null,
"\155\157\144\165\154\145", "\143\154\141\163\163", "\151\156\143\154\165\144\145", "\142\171\164\145",
"\142\157\157\154\145\141\156", "\151\156\164", "\154\157\156\147", "\146\154\157\141\164",
"\144\157\165\142\154\145", "\165\163\164\162\151\156\147", "\142\165\146\146\145\162",
"\166\145\143\164\157\162", "\155\141\160", "\173", "\175", "\74", "\76", "\73", "\54", "\56", null, null, };
public static final String[] lexStateNames = {
"DEFAULT",
"WithinOneLineComment",
"WithinMultiLineComment",
};
public static final int[] jjnewLexState = {
-1, -1, -1, -1, -1, 1, 0, -1, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1,
};
static final long[] jjtoToken = {
0x1fffff801L,
};
static final long[] jjtoSkip = {
0x37eL,
};
static final long[] jjtoSpecial = {
0x360L,
};
static final long[] jjtoMore = {
0x480L,
};
protected SimpleCharStream input_stream;
private final int[] jjrounds = new int[5];
private final int[] jjstateSet = new int[10];
StringBuffer image;
int jjimageLen;
int lengthOfMatch;
protected char curChar;
public RccTokenManager(SimpleCharStream stream){
if (SimpleCharStream.staticFlag)
throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
input_stream = stream;
}
public RccTokenManager(SimpleCharStream stream, int lexState){
this(stream);
SwitchTo(lexState);
}
public void ReInit(SimpleCharStream stream)
{
jjmatchedPos = jjnewStateCnt = 0;
curLexState = defaultLexState;
input_stream = stream;
ReInitRounds();
}
private final void ReInitRounds()
{
int i;
jjround = 0x80000001;
for (i = 5; i-- > 0;)
jjrounds[i] = 0x80000000;
}
public void ReInit(SimpleCharStream stream, int lexState)
{
ReInit(stream);
SwitchTo(lexState);
}
public void SwitchTo(int lexState)
{
if (lexState >= 3 || lexState < 0)
throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
else
curLexState = lexState;
}
protected Token jjFillToken()
{
Token t = Token.newToken(jjmatchedKind);
t.kind = jjmatchedKind;
String im = jjstrLiteralImages[jjmatchedKind];
t.image = (im == null) ? input_stream.GetImage() : im;
t.beginLine = input_stream.getBeginLine();
t.beginColumn = input_stream.getBeginColumn();
t.endLine = input_stream.getEndLine();
t.endColumn = input_stream.getEndColumn();
return t;
}
int curLexState = 0;
int defaultLexState = 0;
int jjnewStateCnt;
int jjround;
int jjmatchedPos;
int jjmatchedKind;
public Token getNextToken()
{
int kind;
Token specialToken = null;
Token matchedToken;
int curPos = 0;
EOFLoop :
for (;;)
{
try
{
curChar = input_stream.BeginToken();
}
catch(java.io.IOException e)
{
jjmatchedKind = 0;
matchedToken = jjFillToken();
matchedToken.specialToken = specialToken;
return matchedToken;
}
image = null;
jjimageLen = 0;
for (;;)
{
switch(curLexState)
{
case 0:
try { input_stream.backup(0);
while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L)
curChar = input_stream.BeginToken();
}
catch (java.io.IOException e1) { continue EOFLoop; }
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_0();
break;
case 1:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_1();
if (jjmatchedPos == 0 && jjmatchedKind > 7)
{
jjmatchedKind = 7;
}
break;
case 2:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_2();
if (jjmatchedPos == 0 && jjmatchedKind > 10)
{
jjmatchedKind = 10;
}
break;
}
if (jjmatchedKind != 0x7fffffff)
{
if (jjmatchedPos + 1 < curPos)
input_stream.backup(curPos - jjmatchedPos - 1);
if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
matchedToken = jjFillToken();
matchedToken.specialToken = specialToken;
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
return matchedToken;
}
else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
matchedToken = jjFillToken();
if (specialToken == null)
specialToken = matchedToken;
else
{
matchedToken.specialToken = specialToken;
specialToken = (specialToken.next = matchedToken);
}
SkipLexicalActions(matchedToken);
}
else
SkipLexicalActions(null);
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
continue EOFLoop;
}
jjimageLen += jjmatchedPos + 1;
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
curPos = 0;
jjmatchedKind = 0x7fffffff;
try {
curChar = input_stream.readChar();
continue;
}
catch (java.io.IOException e1) { }
}
int error_line = input_stream.getEndLine();
int error_column = input_stream.getEndColumn();
String error_after = null;
boolean EOFSeen = false;
try { input_stream.readChar(); input_stream.backup(1); }
catch (java.io.IOException e1) {
EOFSeen = true;
error_after = curPos <= 1 ? "" : input_stream.GetImage();
if (curChar == '\n' || curChar == '\r') {
error_line++;
error_column = 0;
}
else
error_column++;
}
if (!EOFSeen) {
input_stream.backup(1);
error_after = curPos <= 1 ? "" : input_stream.GetImage();
}
throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
}
}
}
void SkipLexicalActions(Token matchedToken)
{
switch(jjmatchedKind)
{
default :
break;
}
}
}
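The token manager can be exercised without the parser: getNextToken() keeps producing tokens until it fabricates the EOF token (kind 0). A minimal sketch over a made-up input string, assuming same-package access to the generated classes:

import java.io.StringReader;

public class RccTokenManagerDemo {
  public static void main(String[] args) {
    SimpleCharStream in = new SimpleCharStream(new StringReader("module demo { }"));
    RccTokenManager tm = new RccTokenManager(in);
    // Prints the kind and image of each token: module, demo, {, }
    for (Token t = tm.getNextToken(); t.kind != RccConstants.EOF; t = tm.getNextToken()) {
      System.out.println(t.kind + "\t" + t.image);
    }
  }
}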


@@ -0,0 +1,446 @@
/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* An implementation of interface CharStream, where the stream is assumed to
* contain only ASCII characters (without unicode processing).
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SimpleCharStream
{
public static final boolean staticFlag = false;
int bufsize;
int available;
int tokenBegin;
public int bufpos = -1;
protected int bufline[];
protected int bufcolumn[];
protected int column = 0;
protected int line = 1;
protected boolean prevCharIsCR = false;
protected boolean prevCharIsLF = false;
protected java.io.Reader inputStream;
protected char[] buffer;
protected int maxNextCharInd = 0;
protected int inBuf = 0;
protected int tabSize = 8;
protected void setTabSize(int i) { tabSize = i; }
protected int getTabSize(int i) { return tabSize; }
protected void ExpandBuff(boolean wrapAround)
{
char[] newbuffer = new char[bufsize + 2048];
int newbufline[] = new int[bufsize + 2048];
int newbufcolumn[] = new int[bufsize + 2048];
try
{
if (wrapAround)
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
System.arraycopy(buffer, 0, newbuffer,
bufsize - tokenBegin, bufpos);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
bufcolumn = newbufcolumn;
maxNextCharInd = (bufpos += (bufsize - tokenBegin));
}
else
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
bufcolumn = newbufcolumn;
maxNextCharInd = (bufpos -= tokenBegin);
}
}
catch (Throwable t)
{
throw new Error(t.getMessage());
}
bufsize += 2048;
available = bufsize;
tokenBegin = 0;
}
protected void FillBuff() throws java.io.IOException
{
if (maxNextCharInd == available)
{
if (available == bufsize)
{
if (tokenBegin > 2048)
{
bufpos = maxNextCharInd = 0;
available = tokenBegin;
}
else if (tokenBegin < 0)
bufpos = maxNextCharInd = 0;
else
ExpandBuff(false);
}
else if (available > tokenBegin)
available = bufsize;
else if ((tokenBegin - available) < 2048)
ExpandBuff(true);
else
available = tokenBegin;
}
int i;
try {
if ((i = inputStream.read(buffer, maxNextCharInd,
available - maxNextCharInd)) == -1)
{
inputStream.close();
throw new java.io.IOException();
}
else
maxNextCharInd += i;
return;
}
catch(java.io.IOException e) {
--bufpos;
backup(0);
if (tokenBegin == -1)
tokenBegin = bufpos;
throw e;
}
}
public char BeginToken() throws java.io.IOException
{
tokenBegin = -1;
char c = readChar();
tokenBegin = bufpos;
return c;
}
protected void UpdateLineColumn(char c)
{
column++;
if (prevCharIsLF)
{
prevCharIsLF = false;
line += (column = 1);
}
else if (prevCharIsCR)
{
prevCharIsCR = false;
if (c == '\n')
{
prevCharIsLF = true;
}
else
line += (column = 1);
}
switch (c)
{
case '\r' :
prevCharIsCR = true;
break;
case '\n' :
prevCharIsLF = true;
break;
case '\t' :
column--;
column += (tabSize - (column % tabSize));
break;
default :
break;
}
bufline[bufpos] = line;
bufcolumn[bufpos] = column;
}
public char readChar() throws java.io.IOException
{
if (inBuf > 0)
{
--inBuf;
if (++bufpos == bufsize)
bufpos = 0;
return buffer[bufpos];
}
if (++bufpos >= maxNextCharInd)
FillBuff();
char c = buffer[bufpos];
UpdateLineColumn(c);
return (c);
}
public int getEndColumn() {
return bufcolumn[bufpos];
}
public int getEndLine() {
return bufline[bufpos];
}
public int getBeginColumn() {
return bufcolumn[tokenBegin];
}
public int getBeginLine() {
return bufline[tokenBegin];
}
public void backup(int amount) {
inBuf += amount;
if ((bufpos -= amount) < 0)
bufpos += bufsize;
}
public SimpleCharStream(java.io.Reader dstream, int startline,
int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
column = startcolumn - 1;
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
}
public SimpleCharStream(java.io.Reader dstream, int startline,
int startcolumn)
{
this(dstream, startline, startcolumn, 4096);
}
public SimpleCharStream(java.io.Reader dstream)
{
this(dstream, 1, 1, 4096);
}
public void ReInit(java.io.Reader dstream, int startline,
int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
column = startcolumn - 1;
if (buffer == null || buffersize != buffer.length)
{
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
}
prevCharIsLF = prevCharIsCR = false;
tokenBegin = inBuf = maxNextCharInd = 0;
bufpos = -1;
}
public void ReInit(java.io.Reader dstream, int startline,
int startcolumn)
{
ReInit(dstream, startline, startcolumn, 4096);
}
public void ReInit(java.io.Reader dstream)
{
ReInit(dstream, 1, 1, 4096);
}
public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
{
this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
}
public SimpleCharStream(java.io.InputStream dstream, int startline,
int startcolumn, int buffersize)
{
this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
}
public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
int startcolumn) throws java.io.UnsupportedEncodingException
{
this(dstream, encoding, startline, startcolumn, 4096);
}
public SimpleCharStream(java.io.InputStream dstream, int startline,
int startcolumn)
{
this(dstream, startline, startcolumn, 4096);
}
public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
{
this(dstream, encoding, 1, 1, 4096);
}
public SimpleCharStream(java.io.InputStream dstream)
{
this(dstream, 1, 1, 4096);
}
public void ReInit(java.io.InputStream dstream, String encoding, int startline,
int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
{
ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
}
public void ReInit(java.io.InputStream dstream, int startline,
int startcolumn, int buffersize)
{
ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
}
public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
{
ReInit(dstream, encoding, 1, 1, 4096);
}
public void ReInit(java.io.InputStream dstream)
{
ReInit(dstream, 1, 1, 4096);
}
public void ReInit(java.io.InputStream dstream, String encoding, int startline,
int startcolumn) throws java.io.UnsupportedEncodingException
{
ReInit(dstream, encoding, startline, startcolumn, 4096);
}
public void ReInit(java.io.InputStream dstream, int startline,
int startcolumn)
{
ReInit(dstream, startline, startcolumn, 4096);
}
public String GetImage()
{
if (bufpos >= tokenBegin)
return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
else
return new String(buffer, tokenBegin, bufsize - tokenBegin) +
new String(buffer, 0, bufpos + 1);
}
public char[] GetSuffix(int len)
{
char[] ret = new char[len];
if ((bufpos + 1) >= len)
System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
else
{
System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
len - bufpos - 1);
System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
}
return ret;
}
public void Done()
{
buffer = null;
bufline = null;
bufcolumn = null;
}
/**
* Method to adjust line and column numbers for the start of a token.
*/
public void adjustBeginLineColumn(int newLine, int newCol)
{
int start = tokenBegin;
int len;
if (bufpos >= tokenBegin)
{
len = bufpos - tokenBegin + inBuf + 1;
}
else
{
len = bufsize - tokenBegin + bufpos + 1 + inBuf;
}
int i = 0, j = 0, k = 0;
int nextColDiff = 0, columnDiff = 0;
while (i < len &&
bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
{
bufline[j] = newLine;
nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
bufcolumn[j] = newCol + columnDiff;
columnDiff = nextColDiff;
i++;
}
if (i < len)
{
bufline[j] = newLine++;
bufcolumn[j] = newCol + columnDiff;
while (i++ < len)
{
if (bufline[j = start % bufsize] != bufline[++start % bufsize])
bufline[j] = newLine++;
else
bufline[j] = newLine;
}
}
line = bufline[j];
column = bufcolumn[j];
}
}
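The stream's token-oriented contract shows up in a few lines: BeginToken() marks the token start, backup() re-buffers already-consumed characters, and GetImage() returns everything since the mark. A minimal sketch with arbitrary input text:

import java.io.StringReader;

public class SimpleCharStreamDemo {
  public static void main(String[] args) throws java.io.IOException {
    SimpleCharStream s = new SimpleCharStream(new StringReader("abc"));
    s.BeginToken();               // reads 'a' and marks tokenBegin
    s.readChar();                 // 'b'
    s.readChar();                 // 'c'
    s.backup(1);                  // push 'c' back into the buffer
    System.out.println(s.GetImage());        // "ab": tokenBegin through bufpos
    System.out.println(s.readChar() == 'c'); // true: backed-up char is replayed
  }
}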


@@ -0,0 +1,107 @@
/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Describes the input token stream.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Token {
/**
* An integer that describes the kind of this token. This numbering
* system is determined by JavaCCParser, and a table of these numbers is
* stored in the file ...Constants.java.
*/
public int kind;
/**
* beginLine and beginColumn describe the position of the first character
* of this token; endLine and endColumn describe the position of the
* last character of this token.
*/
public int beginLine, beginColumn, endLine, endColumn;
/**
* The string image of the token.
*/
public String image;
/**
* A reference to the next regular (non-special) token from the input
* stream. If this is the last token from the input stream, or if the
* token manager has not read tokens beyond this one, this field is
* set to null. This is true only if this token is also a regular
* token. Otherwise, see below for a description of the contents of
* this field.
*/
public Token next;
/**
* This field is used to access special tokens that occur prior to this
* token, but after the immediately preceding regular (non-special) token.
* If there are no such special tokens, this field is set to null.
* When there are more than one such special token, this field refers
* to the last of these special tokens, which in turn refers to the next
* previous special token through its specialToken field, and so on
* until the first special token (whose specialToken field is null).
* The next fields of special tokens refer to other special tokens that
* immediately follow it (without an intervening regular token). If there
* is no such token, this field is null.
*/
public Token specialToken;
/**
* Returns the image.
*/
@Override
public String toString()
{
return image;
}
/**
* Returns a new Token object, by default. However, if you want, you
* can create and return subclass objects based on the value of ofKind.
* Simply add the cases to the switch for all those special cases.
* For example, if you have a subclass of Token called IDToken that
 * you want to create if ofKind is ID, simply add something like :
*
* case MyParserConstants.ID : return new IDToken();
*
* to the following switch statement. Then you can cast matchedToken
* variable to the appropriate type and use it in your lexical actions.
*/
public static final Token newToken(int ofKind)
{
switch(ofKind)
{
default : return new Token();
}
}
}
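A small sketch of the factory hook described in the comment above: newToken() is where Token subclasses would be substituted per kind, and toString() simply returns the image. The identifier text is made up:

public class TokenDemo {
  public static void main(String[] args) {
    Token t = Token.newToken(RccConstants.IDENT_TKN); // default case: plain Token
    t.kind = RccConstants.IDENT_TKN;
    t.image = "myRecord";  // hypothetical identifier text
    System.out.println(t); // prints "myRecord" via toString()
  }
}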


@@ -0,0 +1,161 @@
/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TokenMgrError extends Error
{
/*
* Ordinals for various reasons why an Error of this type can be thrown.
*/
/**
 * Lexical error occurred.
*/
static final int LEXICAL_ERROR = 0;
/**
 * An attempt was made to create a second instance of a static token manager.
*/
static final int STATIC_LEXER_ERROR = 1;
/**
* Tried to change to an invalid lexical state.
*/
static final int INVALID_LEXICAL_STATE = 2;
/**
* Detected (and bailed out of) an infinite loop in the token manager.
*/
static final int LOOP_DETECTED = 3;
/**
* Indicates the reason why the exception is thrown. It will have
* one of the above 4 values.
*/
int errorCode;
/**
 * Replaces unprintable characters by their escaped (or unicode escaped)
 * equivalents in the given string.
*/
protected static final String addEscapes(String str) {
StringBuffer retval = new StringBuffer();
char ch;
for (int i = 0; i < str.length(); i++) {
switch (str.charAt(i))
{
case 0 :
continue;
case '\b':
retval.append("\\b");
continue;
case '\t':
retval.append("\\t");
continue;
case '\n':
retval.append("\\n");
continue;
case '\f':
retval.append("\\f");
continue;
case '\r':
retval.append("\\r");
continue;
case '\"':
retval.append("\\\"");
continue;
case '\'':
retval.append("\\\'");
continue;
case '\\':
retval.append("\\\\");
continue;
default:
if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
String s = "0000" + Integer.toString(ch, 16);
retval.append("\\u" + s.substring(s.length() - 4, s.length()));
} else {
retval.append(ch);
}
continue;
}
}
return retval.toString();
}
/**
* Returns a detailed message for the Error when it is thrown by the
* token manager to indicate a lexical error.
* Parameters :
 *    EOFSeen     : indicates if EOF caused the lexical error
 *    curLexState : lexical state in which this error occurred
 *    errorLine   : line number when the error occurred
 *    errorColumn : column number when the error occurred
 *    errorAfter  : prefix that was seen before this error occurred
* curchar : the offending character
* Note: You can customize the lexical error message by modifying this method.
*/
protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
return("Lexical error at line " +
errorLine + ", column " +
errorColumn + ". Encountered: " +
(EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
"after : \"" + addEscapes(errorAfter) + "\"");
}
/**
* You can also modify the body of this method to customize your error messages.
 * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
 * of end-user concern, so you can return something like:
*
* "Internal Error : Please file a bug report .... "
*
* from this method for such cases in the release version of your parser.
*/
@Override
public String getMessage() {
return super.getMessage();
}
/*
* Constructors of various flavors follow.
*/
public TokenMgrError() {
}
public TokenMgrError(String message, int reason) {
super(message);
errorCode = reason;
}
public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
}
}

View File

@ -0,0 +1,35 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<head>
<title>Hadoop Record Compiler: Parser</title>
</head>
<body>
<p>
(DEPRECATED) This package contains code generated by JavaCC from the
Hadoop record syntax file rcc.jj. For details about the record file
syntax, see the org.apache.hadoop.record package documentation.
</p>
<p>
DEPRECATED: Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
</p>
</body>
</html>

View File

@ -0,0 +1,384 @@
options {
STATIC=false;
}
PARSER_BEGIN(Rcc)
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.compiler.generated;
import org.apache.hadoop.record.compiler.*;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.io.File;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.io.IOException;
public class Rcc {
private static String language = "java";
private static String destDir = ".";
private static ArrayList<String> recFiles = new ArrayList<String>();
private static ArrayList<String> cmdargs = new ArrayList<String>();
private static JFile curFile;
private static Hashtable<String,JRecord> recTab;
private static String curDir = ".";
private static String curFileName;
private static String curModuleName;
public static void main(String[] args) {
System.exit(driver(args));
}
public static void usage() {
System.err.println("Usage: rcc --language [java|c++] ddl-files");
}
public static int driver(String[] args) {
for (int i=0; i<args.length; i++) {
if ("-l".equalsIgnoreCase(args[i]) ||
"--language".equalsIgnoreCase(args[i])) {
language = args[i+1].toLowerCase();
i++;
} else if ("-d".equalsIgnoreCase(args[i]) ||
"--destdir".equalsIgnoreCase(args[i])) {
destDir = args[i+1];
i++;
} else if (args[i].startsWith("-")) {
String arg = args[i].substring(1);
if (arg.startsWith("-")) {
arg = arg.substring(1);
}
cmdargs.add(arg.toLowerCase());
} else {
recFiles.add(args[i]);
}
}
if (recFiles.size() == 0) {
usage();
return 1;
}
for (int i=0; i<recFiles.size(); i++) {
curFileName = recFiles.get(i);
File file = new File(curFileName);
try {
FileReader reader = new FileReader(file);
Rcc parser = new Rcc(reader);
try {
recTab = new Hashtable<String,JRecord>();
curFile = parser.Input();
} catch (ParseException e) {
System.err.println(e.toString());
return 1;
}
try {
reader.close();
} catch (IOException e) {
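        // ignore errors while closing the reader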
}
} catch (FileNotFoundException e) {
System.err.println("File " + (String) recFiles.get(i) +
" Not found.");
return 1;
}
try {
int retCode = curFile.genCode(language, destDir, cmdargs);
if (retCode != 0) { return retCode; }
} catch (IOException e) {
System.err.println(e.toString());
return 1;
}
}
return 0;
}
}
PARSER_END(Rcc)
SKIP :
{
" "
| "\t"
| "\n"
| "\r"
}
SPECIAL_TOKEN :
{
"//" : WithinOneLineComment
}
<WithinOneLineComment> SPECIAL_TOKEN :
{
<("\n" | "\r" | "\r\n" )> : DEFAULT
}
<WithinOneLineComment> MORE :
{
<~[]>
}
SPECIAL_TOKEN :
{
"/*" : WithinMultiLineComment
}
<WithinMultiLineComment> SPECIAL_TOKEN :
{
"*/" : DEFAULT
}
<WithinMultiLineComment> MORE :
{
<~[]>
}
TOKEN :
{
<MODULE_TKN: "module">
| <RECORD_TKN: "class">
| <INCLUDE_TKN: "include">
| <BYTE_TKN: "byte">
| <BOOLEAN_TKN: "boolean">
| <INT_TKN: "int">
| <LONG_TKN: "long">
| <FLOAT_TKN: "float">
| <DOUBLE_TKN: "double">
| <USTRING_TKN: "ustring">
| <BUFFER_TKN: "buffer">
| <VECTOR_TKN: "vector">
| <MAP_TKN: "map">
| <LBRACE_TKN: "{">
| <RBRACE_TKN: "}">
| <LT_TKN: "<">
| <GT_TKN: ">">
| <SEMICOLON_TKN: ";">
| <COMMA_TKN: ",">
| <DOT_TKN: ".">
| <CSTRING_TKN: "\"" ( ~["\""] )+ "\"">
| <IDENT_TKN: ["A"-"Z","a"-"z"] (["a"-"z","A"-"Z","0"-"9","_"])*>
}
JFile Input() :
{
ArrayList<JFile> ilist = new ArrayList<JFile>();
ArrayList<JRecord> rlist = new ArrayList<JRecord>();
JFile i;
ArrayList<JRecord> l;
}
{
(
i = Include()
{ ilist.add(i); }
| l = Module()
{ rlist.addAll(l); }
)+
<EOF>
{ return new JFile(curFileName, ilist, rlist); }
}
JFile Include() :
{
String fname;
Token t;
}
{
<INCLUDE_TKN>
t = <CSTRING_TKN>
{
JFile ret = null;
fname = t.image.replaceAll("^\"", "").replaceAll("\"$","");
File file = new File(curDir, fname);
String tmpDir = curDir;
String tmpFile = curFileName;
curDir = file.getParent();
curFileName = file.getName();
try {
FileReader reader = new FileReader(file);
Rcc parser = new Rcc(reader);
try {
ret = parser.Input();
System.out.println(fname + " Parsed Successfully");
} catch (ParseException e) {
System.out.println(e.toString());
System.exit(1);
}
try {
reader.close();
} catch (IOException e) {
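        // ignore errors while closing the reader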
}
} catch (FileNotFoundException e) {
System.out.println("File " + fname +
" Not found.");
System.exit(1);
}
curDir = tmpDir;
curFileName = tmpFile;
return ret;
}
}
ArrayList<JRecord> Module() :
{
String mName;
ArrayList<JRecord> rlist;
}
{
<MODULE_TKN>
mName = ModuleName()
{ curModuleName = mName; }
<LBRACE_TKN>
rlist = RecordList()
<RBRACE_TKN>
{ return rlist; }
}
String ModuleName() :
{
String name = "";
Token t;
}
{
t = <IDENT_TKN>
{ name += t.image; }
(
<DOT_TKN>
t = <IDENT_TKN>
{ name += "." + t.image; }
)*
{ return name; }
}
ArrayList<JRecord> RecordList() :
{
ArrayList<JRecord> rlist = new ArrayList<JRecord>();
JRecord r;
}
{
(
r = Record()
{ rlist.add(r); }
)+
{ return rlist; }
}
JRecord Record() :
{
String rname;
ArrayList<JField<JType>> flist = new ArrayList<JField<JType>>();
Token t;
JField<JType> f;
}
{
<RECORD_TKN>
t = <IDENT_TKN>
{ rname = t.image; }
<LBRACE_TKN>
(
f = Field()
{ flist.add(f); }
<SEMICOLON_TKN>
)+
<RBRACE_TKN>
{
String fqn = curModuleName + "." + rname;
JRecord r = new JRecord(fqn, flist);
recTab.put(fqn, r);
return r;
}
}
JField<JType> Field() :
{
JType jt;
Token t;
}
{
jt = Type()
t = <IDENT_TKN>
{ return new JField<JType>(t.image, jt); }
}
JType Type() :
{
JType jt;
Token t;
String rname;
}
{
jt = Map()
{ return jt; }
| jt = Vector()
{ return jt; }
| <BYTE_TKN>
{ return new JByte(); }
| <BOOLEAN_TKN>
{ return new JBoolean(); }
| <INT_TKN>
{ return new JInt(); }
| <LONG_TKN>
{ return new JLong(); }
| <FLOAT_TKN>
{ return new JFloat(); }
| <DOUBLE_TKN>
{ return new JDouble(); }
| <USTRING_TKN>
{ return new JString(); }
| <BUFFER_TKN>
{ return new JBuffer(); }
| rname = ModuleName()
{
if (rname.indexOf('.', 0) < 0) {
rname = curModuleName + "." + rname;
}
JRecord r = recTab.get(rname);
if (r == null) {
System.out.println("Type " + rname + " not known. Exiting.");
System.exit(1);
}
return r;
}
}
JMap Map() :
{
JType jt1;
JType jt2;
}
{
<MAP_TKN>
<LT_TKN>
jt1 = Type()
<COMMA_TKN>
jt2 = Type()
<GT_TKN>
{ return new JMap(jt1, jt2); }
}
JVector Vector() :
{
JType jt;
}
{
<VECTOR_TKN>
<LT_TKN>
jt = Type()
<GT_TKN>
{ return new JVector(jt); }
}
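
For a concrete sense of the input this grammar accepts, here is a minimal,
hypothetical DDL file (links.jr, say) that exercises the module, record, and
compound-type productions defined above:

module links {
    class Link {
        ustring URL;
        boolean isRelative;
        int refCount;
        vector<ustring> anchors;
        map<ustring, long> hitsByDay;
    }
}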

View File

@ -0,0 +1,37 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<head>
<title>Hadoop Record Compiler</title>
</head>
<body>
<p>
(DEPRECATED) This package contains classes needed for code generation
from the Hadoop record compiler. CppGenerator and JavaGenerator
are the main entry points from the parser. There are classes
corresponding to every primitive type and compound type
included in the Hadoop record I/O syntax.
</p>
<p>
DEPRECATED: Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
</p>
</body>
</html>
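
For orientation, the generator can also be driven programmatically through the
Rcc.driver entry point defined in rcc.jj above. A minimal sketch; the input
file links.jr and output directory gen-src are hypothetical:

public class RccExample {
  public static void main(String[] args) {
    // Generate Java classes from a record DDL file; driver returns
    // 0 on success and a non-zero code on failure.
    int rc = org.apache.hadoop.record.compiler.generated.Rcc.driver(
        new String[] { "--language", "java", "--destdir", "gen-src", "links.jr" });
    System.exit(rc);
  }
}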

View File

@ -0,0 +1,107 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordOutput;
/**
 * Represents type information for a field, which is made up of its
* ID (name) and its type (a TypeID object).
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FieldTypeInfo
{
private String fieldID;
private TypeID typeID;
/**
   * Construct a FieldTypeInfo with the given field name and type.
*/
FieldTypeInfo(String fieldID, TypeID typeID) {
this.fieldID = fieldID;
this.typeID = typeID;
}
/**
* get the field's TypeID object
*/
public TypeID getTypeID() {
return typeID;
}
/**
* get the field's id (name)
*/
public String getFieldID() {
return fieldID;
}
void write(RecordOutput rout, String tag) throws IOException {
rout.writeString(fieldID, tag);
typeID.write(rout, tag);
}
/**
   * Two FieldTypeInfos are equal if each of their fields matches.
*/
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (!(o instanceof FieldTypeInfo))
return false;
FieldTypeInfo fti = (FieldTypeInfo) o;
// first check if fieldID matches
if (!this.fieldID.equals(fti.fieldID)) {
return false;
}
// now see if typeID matches
return (this.typeID.equals(fti.typeID));
}
/**
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
@Override
public int hashCode() {
return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode();
}
public boolean equals(FieldTypeInfo ti) {
// first check if fieldID matches
if (!this.fieldID.equals(ti.fieldID)) {
return false;
}
// now see if typeID matches
return (this.typeID.equals(ti.typeID));
}
}

View File

@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordOutput;
/**
* Represents typeID for a Map
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MapTypeID extends TypeID {
private TypeID typeIDKey;
private TypeID typeIDValue;
public MapTypeID(TypeID typeIDKey, TypeID typeIDValue) {
super(RIOType.MAP);
this.typeIDKey = typeIDKey;
this.typeIDValue = typeIDValue;
}
/**
* get the TypeID of the map's key element
*/
public TypeID getKeyTypeID() {
return this.typeIDKey;
}
/**
* get the TypeID of the map's value element
*/
public TypeID getValueTypeID() {
return this.typeIDValue;
}
@Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
typeIDKey.write(rout, tag);
typeIDValue.write(rout, tag);
}
/**
* Two map typeIDs are equal if their constituent elements have the
* same type
*/
@Override
public boolean equals(Object o) {
if (!super.equals(o))
return false;
MapTypeID mti = (MapTypeID) o;
return this.typeIDKey.equals(mti.typeIDKey) &&
this.typeIDValue.equals(mti.typeIDValue);
}
/**
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
@Override
public int hashCode() {
return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode();
}
}

View File

@ -0,0 +1,161 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordInput;
import org.apache.hadoop.record.RecordOutput;
/**
* A record's Type Information object which can read/write itself.
*
* Type information for a record comprises metadata about the record,
* as well as a collection of type information for each field in the record.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class RecordTypeInfo extends org.apache.hadoop.record.Record
{
private String name;
// A RecordTypeInfo is really just a wrapper around StructTypeID
StructTypeID sTid;
// A RecordTypeInfo object is just a collection of TypeInfo objects for each of its fields.
//private ArrayList<FieldTypeInfo> typeInfos = new ArrayList<FieldTypeInfo>();
// we keep a hashmap of struct/record names and their type information, as we need it to
// set filters when reading nested structs. This map is used during deserialization.
//private Map<String, RecordTypeInfo> structRTIs = new HashMap<String, RecordTypeInfo>();
/**
* Create an empty RecordTypeInfo object.
*/
public RecordTypeInfo() {
sTid = new StructTypeID();
}
/**
* Create a RecordTypeInfo object representing a record with the given name
* @param name Name of the record
*/
public RecordTypeInfo(String name) {
this.name = name;
sTid = new StructTypeID();
}
/*
* private constructor
*/
private RecordTypeInfo(String name, StructTypeID stid) {
this.sTid = stid;
this.name = name;
}
/**
* return the name of the record
*/
public String getName() {
return name;
}
/**
* set the name of the record
*/
public void setName(String name) {
this.name = name;
}
/**
* Add a field.
* @param fieldName Name of the field
* @param tid Type ID of the field
*/
public void addField(String fieldName, TypeID tid) {
sTid.getFieldTypeInfos().add(new FieldTypeInfo(fieldName, tid));
}
private void addAll(Collection<FieldTypeInfo> tis) {
sTid.getFieldTypeInfos().addAll(tis);
}
/**
* Return a collection of field type infos
*/
public Collection<FieldTypeInfo> getFieldTypeInfos() {
return sTid.getFieldTypeInfos();
}
/**
* Return the type info of a nested record. We only consider nesting
* to one level.
* @param name Name of the nested record
*/
public RecordTypeInfo getNestedStructTypeInfo(String name) {
StructTypeID stid = sTid.findStruct(name);
if (null == stid) return null;
return new RecordTypeInfo(name, stid);
}
/**
* Serialize the type information for a record
*/
@Override
public void serialize(RecordOutput rout, String tag) throws IOException {
// write out any header, version info, here
rout.startRecord(this, tag);
rout.writeString(name, tag);
sTid.writeRest(rout, tag);
rout.endRecord(this, tag);
}
/**
* Deserialize the type information for a record
*/
@Override
public void deserialize(RecordInput rin, String tag) throws IOException {
// read in any header, version info
rin.startRecord(tag);
// name
this.name = rin.readString(tag);
sTid.read(rin, tag);
rin.endRecord(tag);
}
/**
* This class doesn't implement Comparable as it's not meant to be used
   * for anything besides de/serializing, so this method always throws an
   * exception: ClassCastException if the peer is not a RecordTypeInfo,
   * UnsupportedOperationException otherwise.
*/
@Override
public int compareTo (final Object peer_) throws ClassCastException {
if (!(peer_ instanceof RecordTypeInfo)) {
throw new ClassCastException("Comparing different types of records.");
}
throw new UnsupportedOperationException("compareTo() is not supported");
}
}
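
To make the read/write contract above concrete, here is a minimal sketch that
builds a RecordTypeInfo and serializes it. It assumes CsvRecordOutput, the
output counterpart of CsvRecordInput in org.apache.hadoop.record, with a
constructor taking an OutputStream:

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.record.CsvRecordOutput;
import org.apache.hadoop.record.meta.MapTypeID;
import org.apache.hadoop.record.meta.RecordTypeInfo;
import org.apache.hadoop.record.meta.TypeID;

public class RecordTypeInfoExample {
  public static void main(String[] args) throws Exception {
    // Describe a record with a string field, an int field, and a map field.
    RecordTypeInfo rti = new RecordTypeInfo("links.Link");
    rti.addField("URL", TypeID.StringTypeID);
    rti.addField("refCount", TypeID.IntTypeID);
    rti.addField("hitsByDay",
        new MapTypeID(TypeID.StringTypeID, TypeID.LongTypeID));

    // RecordTypeInfo is itself a Record, so it can serialize itself.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    rti.serialize(new CsvRecordOutput(out), "links.Link"); // CsvRecordOutput assumed
    System.out.println(out.toString("UTF-8"));
  }
}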

View File

@ -0,0 +1,166 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordInput;
import org.apache.hadoop.record.RecordOutput;
/**
* Represents typeID for a struct
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class StructTypeID extends TypeID {
private ArrayList<FieldTypeInfo> typeInfos = new ArrayList<FieldTypeInfo>();
StructTypeID() {
super(RIOType.STRUCT);
}
/**
* Create a StructTypeID based on the RecordTypeInfo of some record
*/
public StructTypeID(RecordTypeInfo rti) {
super(RIOType.STRUCT);
typeInfos.addAll(rti.getFieldTypeInfos());
}
void add (FieldTypeInfo ti) {
typeInfos.add(ti);
}
public Collection<FieldTypeInfo> getFieldTypeInfos() {
return typeInfos;
}
/*
   * Return the StructTypeID, if any, of the given field.
*/
StructTypeID findStruct(String name) {
// walk through the list, searching. Not the most efficient way, but this
    // is intended to be used rarely, so we keep it simple.
// As an optimization, we can keep a hashmap of record name to its RTI, for later.
for (FieldTypeInfo ti : typeInfos) {
if ((0 == ti.getFieldID().compareTo(name)) && (ti.getTypeID().getTypeVal() == RIOType.STRUCT)) {
return (StructTypeID) ti.getTypeID();
}
}
return null;
}
@Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
writeRest(rout, tag);
}
/*
* Writes rest of the struct (excluding type value).
* As an optimization, this method is directly called by RTI
* for the top level record so that we don't write out the byte
* indicating that this is a struct (since top level records are
* always structs).
*/
void writeRest(RecordOutput rout, String tag) throws IOException {
rout.writeInt(typeInfos.size(), tag);
for (FieldTypeInfo ti : typeInfos) {
ti.write(rout, tag);
}
}
/*
* deserialize ourselves. Called by RTI.
*/
void read(RecordInput rin, String tag) throws IOException {
// number of elements
int numElems = rin.readInt(tag);
for (int i=0; i<numElems; i++) {
typeInfos.add(genericReadTypeInfo(rin, tag));
}
}
// generic reader: reads the next TypeInfo object from stream and returns it
private FieldTypeInfo genericReadTypeInfo(RecordInput rin, String tag) throws IOException {
String fieldName = rin.readString(tag);
TypeID id = genericReadTypeID(rin, tag);
return new FieldTypeInfo(fieldName, id);
}
// generic reader: reads the next TypeID object from stream and returns it
private TypeID genericReadTypeID(RecordInput rin, String tag) throws IOException {
byte typeVal = rin.readByte(tag);
switch (typeVal) {
case TypeID.RIOType.BOOL:
return TypeID.BoolTypeID;
case TypeID.RIOType.BUFFER:
return TypeID.BufferTypeID;
case TypeID.RIOType.BYTE:
return TypeID.ByteTypeID;
case TypeID.RIOType.DOUBLE:
return TypeID.DoubleTypeID;
case TypeID.RIOType.FLOAT:
return TypeID.FloatTypeID;
case TypeID.RIOType.INT:
return TypeID.IntTypeID;
case TypeID.RIOType.LONG:
return TypeID.LongTypeID;
case TypeID.RIOType.MAP:
{
TypeID tIDKey = genericReadTypeID(rin, tag);
TypeID tIDValue = genericReadTypeID(rin, tag);
return new MapTypeID(tIDKey, tIDValue);
}
case TypeID.RIOType.STRING:
return TypeID.StringTypeID;
case TypeID.RIOType.STRUCT:
{
StructTypeID stID = new StructTypeID();
int numElems = rin.readInt(tag);
for (int i=0; i<numElems; i++) {
stID.add(genericReadTypeInfo(rin, tag));
}
return stID;
}
case TypeID.RIOType.VECTOR:
{
TypeID tID = genericReadTypeID(rin, tag);
return new VectorTypeID(tID);
}
default:
// shouldn't be here
throw new IOException("Unknown type read");
}
}
@Override
public boolean equals(Object o) {
return super.equals(o);
}
@Override
public int hashCode() { return super.hashCode(); }
}

View File

@ -0,0 +1,117 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordOutput;
/**
* Represents typeID for basic types.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TypeID {
/**
* constants representing the IDL types we support
*/
public static final class RIOType {
public static final byte BOOL = 1;
public static final byte BUFFER = 2;
public static final byte BYTE = 3;
public static final byte DOUBLE = 4;
public static final byte FLOAT = 5;
public static final byte INT = 6;
public static final byte LONG = 7;
public static final byte MAP = 8;
public static final byte STRING = 9;
public static final byte STRUCT = 10;
public static final byte VECTOR = 11;
}
/**
* Constant classes for the basic types, so we can share them.
*/
public static final TypeID BoolTypeID = new TypeID(RIOType.BOOL);
public static final TypeID BufferTypeID = new TypeID(RIOType.BUFFER);
public static final TypeID ByteTypeID = new TypeID(RIOType.BYTE);
public static final TypeID DoubleTypeID = new TypeID(RIOType.DOUBLE);
public static final TypeID FloatTypeID = new TypeID(RIOType.FLOAT);
public static final TypeID IntTypeID = new TypeID(RIOType.INT);
public static final TypeID LongTypeID = new TypeID(RIOType.LONG);
public static final TypeID StringTypeID = new TypeID(RIOType.STRING);
protected byte typeVal;
/**
* Create a TypeID object
*/
TypeID(byte typeVal) {
this.typeVal = typeVal;
}
/**
* Get the type value. One of the constants in RIOType.
*/
public byte getTypeVal() {
return typeVal;
}
/**
* Serialize the TypeID object
*/
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
}
/**
* Two base typeIDs are equal if they refer to the same type
*/
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null)
return false;
if (this.getClass() != o.getClass())
return false;
TypeID oTypeID = (TypeID) o;
return (this.typeVal == oTypeID.typeVal);
}
/**
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
@Override
public int hashCode() {
    // See 'Effective Java' by Joshua Bloch
return 37*17+(int)typeVal;
}
}

View File

@ -0,0 +1,104 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordInput;
/**
 * Various utility functions for the Hadoop record I/O platform.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Utils {
/** Cannot create a new instance of Utils */
private Utils() {
}
/**
   * Read and discard (skip) the serialized bytes of a value of the given type.
*/
public static void skip(RecordInput rin, String tag, TypeID typeID) throws IOException {
switch (typeID.typeVal) {
case TypeID.RIOType.BOOL:
rin.readBool(tag);
break;
case TypeID.RIOType.BUFFER:
rin.readBuffer(tag);
break;
case TypeID.RIOType.BYTE:
rin.readByte(tag);
break;
case TypeID.RIOType.DOUBLE:
rin.readDouble(tag);
break;
case TypeID.RIOType.FLOAT:
rin.readFloat(tag);
break;
case TypeID.RIOType.INT:
rin.readInt(tag);
break;
case TypeID.RIOType.LONG:
rin.readLong(tag);
break;
case TypeID.RIOType.MAP:
org.apache.hadoop.record.Index midx1 = rin.startMap(tag);
MapTypeID mtID = (MapTypeID) typeID;
for (; !midx1.done(); midx1.incr()) {
skip(rin, tag, mtID.getKeyTypeID());
skip(rin, tag, mtID.getValueTypeID());
}
rin.endMap(tag);
break;
case TypeID.RIOType.STRING:
rin.readString(tag);
break;
case TypeID.RIOType.STRUCT:
rin.startRecord(tag);
// read past each field in the struct
StructTypeID stID = (StructTypeID) typeID;
Iterator<FieldTypeInfo> it = stID.getFieldTypeInfos().iterator();
while (it.hasNext()) {
FieldTypeInfo tInfo = it.next();
skip(rin, tag, tInfo.getTypeID());
}
rin.endRecord(tag);
break;
case TypeID.RIOType.VECTOR:
org.apache.hadoop.record.Index vidx1 = rin.startVector(tag);
VectorTypeID vtID = (VectorTypeID) typeID;
for (; !vidx1.done(); vidx1.incr()) {
skip(rin, tag, vtID.getElementTypeID());
}
rin.endVector(tag);
break;
default:
// shouldn't be here
throw new IOException("Unknown typeID when skipping bytes");
}
}
}
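
A usage sketch for skip: serialize two fields, then on the read side consume
and discard the first without materializing it. The stream-based constructors
of CsvRecordOutput and CsvRecordInput in org.apache.hadoop.record are
assumptions here:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.record.CsvRecordInput;
import org.apache.hadoop.record.CsvRecordOutput;
import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.Utils;

public class SkipExample {
  public static void main(String[] args) throws Exception {
    // Write an int field followed by a string field.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    CsvRecordOutput rout = new CsvRecordOutput(bytes); // constructor assumed
    rout.writeInt(42, "refCount");
    rout.writeString("http://hadoop.apache.org/", "URL");

    // Skip the int we don't care about, then read the string.
    CsvRecordInput rin =
        new CsvRecordInput(new ByteArrayInputStream(bytes.toByteArray()));
    Utils.skip(rin, "refCount", TypeID.IntTypeID);
    System.out.println(rin.readString("URL"));
  }
}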

View File

@ -0,0 +1,74 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.record.meta;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordOutput;
/**
* Represents typeID for vector.
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class VectorTypeID extends TypeID {
private TypeID typeIDElement;
public VectorTypeID(TypeID typeIDElement) {
super(RIOType.VECTOR);
this.typeIDElement = typeIDElement;
}
public TypeID getElementTypeID() {
return this.typeIDElement;
}
@Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
typeIDElement.write(rout, tag);
}
/**
* Two vector typeIDs are equal if their constituent elements have the
* same type
*/
@Override
public boolean equals(Object o) {
if (!super.equals (o))
return false;
VectorTypeID vti = (VectorTypeID) o;
return this.typeIDElement.equals(vti.typeIDElement);
}
/**
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
@Override
public int hashCode() {
return 37*17+typeIDElement.hashCode();
}
}