(f.getName(), f.getType().getCppType()));
+ }
+ }
+
+ @Override
+ String getTypeIDObjectString() {
+ return "new ::hadoop::StructTypeID(" +
+ fullName + "::getTypeInfo().getFieldTypeInfos())";
+ }
+
+ String genDecl(String fname) {
+ return " "+name+" "+fname+";\n";
+ }
+
+ @Override
+ void genSetRTIFilter(CodeBuffer cb) {
+ // we set the RTI filter here
+ cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+
+ name + "\"));\n");
+ }
+
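+    // Emits the C++ setupRtiFields() helper: for each field in the type-info
+    // filter it records the 1-based index of the matching field in this
+    // record's own type info (0 = no match), which the generated
+    // deserialize() uses to read or skip fields by position.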
+ void genSetupRTIFields(CodeBuffer cb) {
+ cb.append("void " + fullName + "::setupRtiFields() {\n");
+ cb.append("if (NULL == p" + Consts.RTI_FILTER + ") return;\n");
+ cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") return;\n");
+ cb.append("p" + Consts.RTI_FILTER_FIELDS + " = new int[p" +
+ Consts.RTI_FILTER + "->getFieldTypeInfos().size()];\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+ cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
+ "i] = 0;\n");
+ cb.append("}\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "j=0; " +
+          Consts.RIO_PREFIX + "j<p" + Consts.RTI_VAR +
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "j++) {\n");
+ cb.append("if (*(p" + Consts.RTI_FILTER + "->getFieldTypeInfos()[" +
+ Consts.RIO_PREFIX + "i]) == *(p" + Consts.RTI_VAR +
+ "->getFieldTypeInfos()[" + Consts.RIO_PREFIX + "j])) {\n");
+ cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
+ "i] = " + Consts.RIO_PREFIX + "j+1;\n");
+ cb.append("break;\n");
+ cb.append("}\n");
+ cb.append("}\n");
+ cb.append("}\n");
+ cb.append("}\n");
+ }
+
+    void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+ throws IOException {
+ CodeBuffer hb = new CodeBuffer();
+
+ String[] ns = module.split("::");
+ for (int i = 0; i < ns.length; i++) {
+ hb.append("namespace "+ns[i]+" {\n");
+ }
+
+ hb.append("class "+name+" : public ::hadoop::Record {\n");
+ hb.append("private:\n");
+
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ type.genDecl(hb, name);
+ }
+
+ // type info vars
+ hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_VAR + ";\n");
+ hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_FILTER + ";\n");
+ hb.append("static int* p" + Consts.RTI_FILTER_FIELDS + ";\n");
+ hb.append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
+ hb.append("static void setupRtiFields();\n");
+ hb.append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " +
+ Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
+ hb.append("public:\n");
+ hb.append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " +
+ "{return *p" + Consts.RTI_VAR + ";}\n");
+ hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
+ hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
+ hb.append("virtual void serialize(::hadoop::OArchive& " +
+ Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const;\n");
+ hb.append("virtual void deserialize(::hadoop::IArchive& " +
+ Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
+ hb.append("virtual const ::std::string& type() const;\n");
+ hb.append("virtual const ::std::string& signature() const;\n");
+ hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
+ hb.append("virtual bool operator==(const "+name+"& peer_) const;\n");
+ hb.append("virtual ~"+name+"() {};\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ type.genGetSet(hb, name);
+ }
+ hb.append("}; // end record "+name+"\n");
+ for (int i=ns.length-1; i>=0; i--) {
+ hb.append("} // end namespace "+ns[i]+"\n");
+ }
+
+ hh.write(hb.toString());
+
+ CodeBuffer cb = new CodeBuffer();
+
+ // initialize type info vars
+ cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
+ Consts.RTI_VAR + " = " + fullName + "::setupTypeInfo();\n");
+ cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
+ Consts.RTI_FILTER + " = NULL;\n");
+ cb.append("int* " + fullName + "::p" +
+ Consts.RTI_FILTER_FIELDS + " = NULL;\n\n");
+
+ // setupTypeInfo()
+ cb.append("::hadoop::RecordTypeInfo* "+fullName+"::setupTypeInfo() {\n");
+ cb.append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" +
+ name + "\");\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ type.genStaticTypeInfo(cb, name);
+ }
+ cb.append("return p;\n");
+ cb.append("}\n");
+
+ // setTypeFilter()
+ cb.append("void "+fullName+"::setTypeFilter(const " +
+ "::hadoop::RecordTypeInfo& rti) {\n");
+ cb.append("if (NULL != p" + Consts.RTI_FILTER + ") {\n");
+ cb.append("delete p" + Consts.RTI_FILTER + ";\n");
+ cb.append("}\n");
+ cb.append("p" + Consts.RTI_FILTER + " = new ::hadoop::RecordTypeInfo(rti);\n");
+ cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") {\n");
+ cb.append("delete p" + Consts.RTI_FILTER_FIELDS + ";\n");
+ cb.append("}\n");
+ cb.append("p" + Consts.RTI_FILTER_FIELDS + " = NULL;\n");
+ // set RTIFilter for nested structs. We may end up with multiple lines that
+ // do the same thing, if the same struct is nested in more than one field,
+ // but that's OK.
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ CppType type = jf.getType();
+ type.genSetRTIFilter(cb);
+ }
+ cb.append("}\n");
+
+ // setTypeFilter()
+ cb.append("void "+fullName+"::setTypeFilter(const " +
+ "::hadoop::RecordTypeInfo* prti) {\n");
+ cb.append("if (NULL != prti) {\n");
+ cb.append("setTypeFilter(*prti);\n");
+ cb.append("}\n");
+ cb.append("}\n");
+
+ // setupRtiFields()
+ genSetupRTIFields(cb);
+
+ // serialize()
+ cb.append("void "+fullName+"::serialize(::hadoop::OArchive& " +
+ Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const {\n");
+ cb.append(Consts.RECORD_OUTPUT + ".startRecord(*this," +
+ Consts.TAG + ");\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ if (type instanceof JBuffer.CppBuffer) {
+ cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+","+name+
+ ".length(),\""+name+"\");\n");
+ } else {
+ cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+",\""+
+ name+"\");\n");
+ }
+ }
+ cb.append(Consts.RECORD_OUTPUT + ".endRecord(*this," + Consts.TAG + ");\n");
+ cb.append("return;\n");
+ cb.append("}\n");
+
+ // deserializeWithoutFilter()
+ cb.append("void "+fullName+"::deserializeWithoutFilter(::hadoop::IArchive& " +
+ Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
+ cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
+ Consts.TAG + ");\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ if (type instanceof JBuffer.CppBuffer) {
+ cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
+ name+",len,\""+name+"\");\n}\n");
+ } else {
+ cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
+ name+"\");\n");
+ }
+ }
+ cb.append(Consts.RECORD_INPUT + ".endRecord(*this," + Consts.TAG + ");\n");
+ cb.append("return;\n");
+ cb.append("}\n");
+
+ // deserialize()
+ cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& " +
+ Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
+ cb.append("if (NULL == p" + Consts.RTI_FILTER + ") {\n");
+ cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " +
+ Consts.TAG + ");\n");
+ cb.append("return;\n");
+ cb.append("}\n");
+ cb.append("// if we're here, we need to read based on version info\n");
+ cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
+ Consts.TAG + ");\n");
+ cb.append("setupRtiFields();\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+ int ct = 0;
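+      // Emit an if/else-if chain keyed on the mapping built by
+      // setupRtiFields(): stored values are 1-based record-field positions,
+      // so value ct selects field ct, and 0 falls through to the skip branch.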
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ CppType type = jf.getType();
+ ct++;
+ if (1 != ct) {
+ cb.append("else ");
+ }
+ cb.append("if (" + ct + " == p" + Consts.RTI_FILTER_FIELDS + "[" +
+ Consts.RIO_PREFIX + "i]) {\n");
+ if (type instanceof JBuffer.CppBuffer) {
+ cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
+ name+",len,\""+name+"\");\n}\n");
+ } else {
+ cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
+ name+"\");\n");
+ }
+ cb.append("}\n");
+ }
+ if (0 != ct) {
+ cb.append("else {\n");
+ cb.append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" +
+ Consts.RTI_FILTER + "->getFieldTypeInfos();\n");
+ cb.append("::hadoop::Utils::skip(" + Consts.RECORD_INPUT +
+ ", typeInfos[" + Consts.RIO_PREFIX + "i]->getFieldID()->c_str()" +
+ ", *(typeInfos[" + Consts.RIO_PREFIX + "i]->getTypeID()));\n");
+ cb.append("}\n");
+ }
+ cb.append("}\n");
+ cb.append(Consts.RECORD_INPUT + ".endRecord(*this, " + Consts.TAG+");\n");
+ cb.append("}\n");
+
+ // operator <
+ cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
+ cb.append("return (1\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ cb.append("&& ("+name+" < peer_."+name+")\n");
+ }
+ cb.append(");\n");
+ cb.append("}\n");
+
+ cb.append("bool "+fullName+"::operator== (const "+fullName+"& peer_) const {\n");
+ cb.append("return (1\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+ JField jf = i.next();
+ String name = jf.getName();
+ cb.append("&& ("+name+" == peer_."+name+")\n");
+ }
+ cb.append(");\n");
+ cb.append("}\n");
+
+ cb.append("const ::std::string&"+fullName+"::type() const {\n");
+ cb.append("static const ::std::string type_(\""+name+"\");\n");
+ cb.append("return type_;\n");
+ cb.append("}\n");
+
+ cb.append("const ::std::string&"+fullName+"::signature() const {\n");
+ cb.append("static const ::std::string sig_(\""+getSignature()+"\");\n");
+ cb.append("return sig_;\n");
+ cb.append("}\n");
+
+ cc.write(cb.toString());
+ }
+ }
+
+ class CRecord extends CCompType {
+
+ }
+
+ private String signature;
+
+ /**
+ * Creates a new instance of JRecord
+ */
+  public JRecord(String name, ArrayList<JField<JType>> flist) {
+ setJavaType(new JavaRecord(name, flist));
+ setCppType(new CppRecord(name, flist));
+ setCType(new CRecord());
+ // precompute signature
+ int idx = name.lastIndexOf('.');
+ String recName = name.substring(idx+1);
+ StringBuilder sb = new StringBuilder();
+ sb.append("L").append(recName).append("(");
+    for (Iterator<JField<JType>> i = flist.iterator(); i.hasNext();) {
+ String s = i.next().getType().getSignature();
+ sb.append(s);
+ }
+ sb.append(")");
+ signature = sb.toString();
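+    // e.g. a DDL record Foo { ustring name; int count; } signs as "LFoo(si)"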
+ }
+
+ @Override
+ String getSignature() {
+ return signature;
+ }
+
+  void genCppCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+ throws IOException {
+ ((CppRecord)getCppType()).genCode(hh, cc, options);
+ }
+
+  void genJavaCode(String destDir, ArrayList<String> options)
+ throws IOException {
+ ((JavaRecord)getJavaType()).genCode(destDir, options);
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JString.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JString.java
new file mode 100644
index 00000000000..cd3ab3dc354
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JString.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class JString extends JCompType {
+
+ class JavaString extends JavaCompType {
+
+ JavaString() {
+ super("String", "String", "String", "TypeID.RIOType.STRING");
+ }
+
+ @Override
+ String getTypeIDObjectString() {
+ return "org.apache.hadoop.record.meta.TypeID.StringTypeID";
+ }
+
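+    // Skips one serialized string during raw byte comparison: read the vint
+    // length, then advance the offset and remaining length past both the
+    // length prefix and the string payload.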
+ @Override
+ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+ cb.append("{\n");
+ cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+ cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
+ cb.append(s+"+=(z+i); "+l+"-= (z+i);\n");
+ cb.append("}\n");
+ }
+
+ @Override
+ void genCompareBytes(CodeBuffer cb) {
+ cb.append("{\n");
+ cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+ cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+ cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
+ cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
+ cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
+ cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);\n");
+ cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
+ cb.append("s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
+ cb.append("}\n");
+ }
+
+ @Override
+ void genClone(CodeBuffer cb, String fname) {
+ cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
+ }
+ }
+
+ class CppString extends CppCompType {
+
+ CppString() {
+ super("::std::string");
+ }
+
+ @Override
+ String getTypeIDObjectString() {
+ return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)";
+ }
+ }
+
+ /** Creates a new instance of JString */
+ public JString() {
+ setJavaType(new JavaString());
+ setCppType(new CppString());
+ setCType(new CCompType());
+ }
+
+ @Override
+ String getSignature() {
+ return "s";
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JType.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JType.java
new file mode 100644
index 00000000000..b9a007b4df3
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JType.java
@@ -0,0 +1,230 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+
+/**
+ * Abstract Base class for all types supported by Hadoop Record I/O.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+abstract public class JType {
+
+ static String toCamelCase(String name) {
+ char firstChar = name.charAt(0);
+ if (Character.isLowerCase(firstChar)) {
+ return ""+Character.toUpperCase(firstChar) + name.substring(1);
+ }
+ return name;
+ }
+
+ JavaType javaType;
+ CppType cppType;
+ CType cType;
+
+ abstract class JavaType {
+ private String name;
+ private String methodSuffix;
+ private String wrapper;
+ private String typeIDByteString; // points to TypeID.RIOType
+
+ JavaType(String javaname,
+ String suffix,
+ String wrapper,
+ String typeIDByteString) {
+ this.name = javaname;
+ this.methodSuffix = suffix;
+ this.wrapper = wrapper;
+ this.typeIDByteString = typeIDByteString;
+ }
+
+ void genDecl(CodeBuffer cb, String fname) {
+ cb.append("private "+name+" "+fname+";\n");
+ }
+
+ void genStaticTypeInfo(CodeBuffer cb, String fname) {
+ cb.append(Consts.RTI_VAR + ".addField(\"" + fname + "\", " +
+ getTypeIDObjectString() + ");\n");
+ }
+
+ abstract String getTypeIDObjectString();
+
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+ // do nothing by default
+ return;
+ }
+
+ /*void genRtiFieldCondition(CodeBuffer cb, String fname, int ct) {
+ cb.append("if ((tInfo.fieldID.equals(\"" + fname + "\")) && (typeVal ==" +
+ " org.apache.hadoop.record.meta." + getTypeIDByteString() + ")) {\n");
+ cb.append("rtiFilterFields[i] = " + ct + ";\n");
+ cb.append("}\n");
+ }
+
+ void genRtiNestedFieldCondition(CodeBuffer cb, String varName, int ct) {
+ cb.append("if (" + varName + ".getElementTypeID().getTypeVal() == " +
+ "org.apache.hadoop.record.meta." + getTypeIDByteString() +
+ ") {\n");
+ cb.append("rtiFilterFields[i] = " + ct + ";\n");
+ cb.append("}\n");
+ }*/
+
+ void genConstructorParam(CodeBuffer cb, String fname) {
+ cb.append("final "+name+" "+fname);
+ }
+
+ void genGetSet(CodeBuffer cb, String fname) {
+ cb.append("public "+name+" get"+toCamelCase(fname)+"() {\n");
+ cb.append("return "+fname+";\n");
+ cb.append("}\n");
+ cb.append("public void set"+toCamelCase(fname)+"(final "+name+" "+fname+") {\n");
+ cb.append("this."+fname+"="+fname+";\n");
+ cb.append("}\n");
+ }
+
+ String getType() {
+ return name;
+ }
+
+ String getWrapperType() {
+ return wrapper;
+ }
+
+ String getMethodSuffix() {
+ return methodSuffix;
+ }
+
+ String getTypeIDByteString() {
+ return typeIDByteString;
+ }
+
+ void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+ cb.append(Consts.RECORD_OUTPUT + ".write"+methodSuffix +
+ "("+fname+",\""+tag+"\");\n");
+ }
+
+ void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+ if (decl) {
+ cb.append(name+" "+fname+";\n");
+ }
+ cb.append(fname+"=" + Consts.RECORD_INPUT + ".read" +
+ methodSuffix+"(\""+tag+"\");\n");
+ }
+
+ void genCompareTo(CodeBuffer cb, String fname, String other) {
+ cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 :(("+
+ fname+"<"+other+")?-1:1);\n");
+ }
+
+ abstract void genCompareBytes(CodeBuffer cb);
+
+ abstract void genSlurpBytes(CodeBuffer cb, String b, String s, String l);
+
+ void genEquals(CodeBuffer cb, String fname, String peer) {
+ cb.append(Consts.RIO_PREFIX + "ret = ("+fname+"=="+peer+");\n");
+ }
+
+ void genHashCode(CodeBuffer cb, String fname) {
+ cb.append(Consts.RIO_PREFIX + "ret = (int)"+fname+";\n");
+ }
+
+ void genConstructorSet(CodeBuffer cb, String fname) {
+ cb.append("this."+fname+" = "+fname+";\n");
+ }
+
+ void genClone(CodeBuffer cb, String fname) {
+ cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
+ }
+ }
+
+ abstract class CppType {
+ private String name;
+
+ CppType(String cppname) {
+ name = cppname;
+ }
+
+ void genDecl(CodeBuffer cb, String fname) {
+ cb.append(name+" "+fname+";\n");
+ }
+
+ void genStaticTypeInfo(CodeBuffer cb, String fname) {
+ cb.append("p->addField(new ::std::string(\"" +
+ fname + "\"), " + getTypeIDObjectString() + ");\n");
+ }
+
+ void genGetSet(CodeBuffer cb, String fname) {
+ cb.append("virtual "+name+" get"+toCamelCase(fname)+"() const {\n");
+ cb.append("return "+fname+";\n");
+ cb.append("}\n");
+ cb.append("virtual void set"+toCamelCase(fname)+"("+name+" m_) {\n");
+ cb.append(fname+"=m_;\n");
+ cb.append("}\n");
+ }
+
+ abstract String getTypeIDObjectString();
+
+ void genSetRTIFilter(CodeBuffer cb) {
+ // do nothing by default
+ return;
+ }
+
+ String getType() {
+ return name;
+ }
+ }
+
+ class CType {
+
+ }
+
+ abstract String getSignature();
+
+ void setJavaType(JavaType jType) {
+ this.javaType = jType;
+ }
+
+ JavaType getJavaType() {
+ return javaType;
+ }
+
+ void setCppType(CppType cppType) {
+ this.cppType = cppType;
+ }
+
+ CppType getCppType() {
+ return cppType;
+ }
+
+ void setCType(CType cType) {
+ this.cType = cType;
+ }
+
+ CType getCType() {
+ return cType;
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JVector.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JVector.java
new file mode 100644
index 00000000000..46ecbada51b
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JVector.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class JVector extends JCompType {
+
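+  // Nested vectors would otherwise reuse the same generated loop variables;
+  // suffixing each identifier with the current nesting level (see getId)
+  // keeps inner and outer loops distinct.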
+ static private int level = 0;
+
+ static private String getId(String id) { return id+getLevel(); }
+
+ static private String getLevel() { return Integer.toString(level); }
+
+ static private void incrLevel() { level++; }
+
+ static private void decrLevel() { level--; }
+
+ private JType type;
+
+ class JavaVector extends JavaCompType {
+
+ private JType.JavaType element;
+
+ JavaVector(JType.JavaType t) {
+ super("java.util.ArrayList<"+t.getWrapperType()+">",
+ "Vector", "java.util.ArrayList<"+t.getWrapperType()+">",
+ "TypeID.RIOType.VECTOR");
+ element = t;
+ }
+
+ @Override
+ String getTypeIDObjectString() {
+ return "new org.apache.hadoop.record.meta.VectorTypeID(" +
+ element.getTypeIDObjectString() + ")";
+ }
+
+ @Override
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+ element.genSetRTIFilter(cb, nestedStructMap);
+ }
+
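+    // Element-wise lexicographic comparison: compare paired elements up to
+    // the shorter length, return on the first difference, else compare sizes.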
+ @Override
+ void genCompareTo(CodeBuffer cb, String fname, String other) {
+ cb.append("{\n");
+ incrLevel();
+ cb.append("int "+getId(Consts.RIO_PREFIX + "len1")+" = "+fname+
+ ".size();\n");
+ cb.append("int "+getId(Consts.RIO_PREFIX + "len2")+" = "+other+
+ ".size();\n");
+ cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; "+
+ getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len1")+
+ " && "+getId(Consts.RIO_PREFIX + "vidx")+"<"+
+ getId(Consts.RIO_PREFIX + "len2")+"; "+
+ getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
+ cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e1")+
+ " = "+fname+
+ ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+ cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e2")+
+ " = "+other+
+ ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+ element.genCompareTo(cb, getId(Consts.RIO_PREFIX + "e1"),
+ getId(Consts.RIO_PREFIX + "e2"));
+ cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
+ Consts.RIO_PREFIX + "ret; }\n");
+ cb.append("}\n");
+ cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "len1")+
+ " - "+getId(Consts.RIO_PREFIX + "len2")+");\n");
+ decrLevel();
+ cb.append("}\n");
+ }
+
+ @Override
+ void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+ if (decl) {
+ cb.append(getType()+" "+fname+";\n");
+ }
+ cb.append("{\n");
+ incrLevel();
+ cb.append("org.apache.hadoop.record.Index "+
+ getId(Consts.RIO_PREFIX + "vidx")+" = " +
+ Consts.RECORD_INPUT + ".startVector(\""+tag+"\");\n");
+ cb.append(fname+"=new "+getType()+"();\n");
+ cb.append("for (; !"+getId(Consts.RIO_PREFIX + "vidx")+".done(); " +
+ getId(Consts.RIO_PREFIX + "vidx")+".incr()) {\n");
+ element.genReadMethod(cb, getId(Consts.RIO_PREFIX + "e"),
+ getId(Consts.RIO_PREFIX + "e"), true);
+ cb.append(fname+".add("+getId(Consts.RIO_PREFIX + "e")+");\n");
+ cb.append("}\n");
+ cb.append(Consts.RECORD_INPUT + ".endVector(\""+tag+"\");\n");
+ decrLevel();
+ cb.append("}\n");
+ }
+
+ @Override
+ void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+ cb.append("{\n");
+ incrLevel();
+ cb.append(Consts.RECORD_OUTPUT + ".startVector("+fname+",\""+tag+"\");\n");
+ cb.append("int "+getId(Consts.RIO_PREFIX + "len")+" = "+fname+".size();\n");
+ cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; " +
+ getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len")+
+ "; "+getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
+ cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e")+" = "+
+ fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+ element.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "e"),
+ getId(Consts.RIO_PREFIX + "e"));
+ cb.append("}\n");
+ cb.append(Consts.RECORD_OUTPUT + ".endVector("+fname+",\""+tag+"\");\n");
+ cb.append("}\n");
+ decrLevel();
+ }
+
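+    // Skips a serialized vector in raw-bytes form: read the element count,
+    // advance past the count's vint, then let the element type's own
+    // genSlurpBytes skip each element.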
+ @Override
+ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+ cb.append("{\n");
+ incrLevel();
+ cb.append("int "+getId("vi")+
+ " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+ cb.append("int "+getId("vz")+
+ " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi")+");\n");
+ cb.append(s+"+="+getId("vz")+"; "+l+"-="+getId("vz")+";\n");
+ cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
+ " < "+getId("vi")+"; "+getId("vidx")+"++)");
+ element.genSlurpBytes(cb, b, s, l);
+ decrLevel();
+ cb.append("}\n");
+ }
+
+ @Override
+ void genCompareBytes(CodeBuffer cb) {
+ cb.append("{\n");
+ incrLevel();
+ cb.append("int "+getId("vi1")+
+ " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+ cb.append("int "+getId("vi2")+
+ " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+ cb.append("int "+getId("vz1")+
+ " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi1")+");\n");
+ cb.append("int "+getId("vz2")+
+ " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi2")+");\n");
+ cb.append("s1+="+getId("vz1")+"; s2+="+getId("vz2")+
+ "; l1-="+getId("vz1")+"; l2-="+getId("vz2")+";\n");
+ cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
+ " < "+getId("vi1")+" && "+getId("vidx")+" < "+getId("vi2")+
+ "; "+getId("vidx")+"++)");
+ element.genCompareBytes(cb);
+ cb.append("if ("+getId("vi1")+" != "+getId("vi2")+
+ ") { return ("+getId("vi1")+"<"+getId("vi2")+")?-1:0; }\n");
+ decrLevel();
+ cb.append("}\n");
+ }
+ }
+
+ class CppVector extends CppCompType {
+
+ private JType.CppType element;
+
+ CppVector(JType.CppType t) {
+ super("::std::vector< "+t.getType()+" >");
+ element = t;
+ }
+
+ @Override
+ String getTypeIDObjectString() {
+ return "new ::hadoop::VectorTypeID(" +
+ element.getTypeIDObjectString() + ")";
+ }
+
+ @Override
+ void genSetRTIFilter(CodeBuffer cb) {
+ element.genSetRTIFilter(cb);
+ }
+
+ }
+
+ /** Creates a new instance of JVector */
+ public JVector(JType t) {
+ type = t;
+ setJavaType(new JavaVector(t.getJavaType()));
+ setCppType(new CppVector(t.getCppType()));
+ setCType(new CCompType());
+ }
+
+ @Override
+ String getSignature() {
+ return "[" + type.getSignature() + "]";
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java
new file mode 100644
index 00000000000..6d51df6cd15
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.util.ArrayList;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * Java Code generator front-end for Hadoop record I/O.
+ */
+class JavaGenerator extends CodeGenerator {
+
+ JavaGenerator() {
+ }
+
+ /**
+ * Generate Java code for records. This method is only a front-end to
+ * JRecord, since one file is generated for each record.
+ *
+ * @param name possibly full pathname to the file
+ * @param ilist included files (as JFile)
+ * @param rlist List of records defined within this file
+ * @param destDir output directory
+ */
+ @Override
+  void genCode(String name, ArrayList<JFile> ilist,
+               ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
+ throws IOException {
+    for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
+ JRecord rec = iter.next();
+ rec.genJavaCode(destDir, options);
+ }
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java
new file mode 100644
index 00000000000..869e0594f79
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.record.compiler.ant;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.compiler.generated.Rcc;
+import org.apache.tools.ant.BuildException;
+import org.apache.tools.ant.DirectoryScanner;
+import org.apache.tools.ant.Project;
+import org.apache.tools.ant.Task;
+import org.apache.tools.ant.types.FileSet;
+
+/**
+ * Hadoop record compiler ant Task
+ * This task takes the given record definition files and compiles them into
+ * java or c++
+ * files. It is then up to the user to compile the generated files.
+ *
+ *
+ * <p> The task requires the <code>file</code> or the nested fileset element to be
+ * specified. Optional attributes are <code>language</code> (set the output
+ * language, default is "java"),
+ * <code>destdir</code> (name of the destination directory for generated java/c++
+ * code, default is ".") and <code>failonerror</code> (specifies error handling
+ * behavior. default is true).
+ * <p><h4>Usage</h4>
+ * <pre>
+ * &lt;recordcc
+ *       destdir="${basedir}/gensrc"
+ *       language="java"&gt;
+ *   &lt;fileset include="**\/*.jr" /&gt;
+ * &lt;/recordcc&gt;
+ * </pre>
+ *
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class RccTask extends Task {
+
+ private String language = "java";
+ private File src;
+ private File dest = new File(".");
+  private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
+ private boolean failOnError = true;
+
+ /** Creates a new instance of RccTask */
+ public RccTask() {
+ }
+
+ /**
+ * Sets the output language option
+ * @param language "java"/"c++"
+ */
+ public void setLanguage(String language) {
+ this.language = language;
+ }
+
+ /**
+ * Sets the record definition file attribute
+ * @param file record definition file
+ */
+ public void setFile(File file) {
+ this.src = file;
+ }
+
+ /**
+ * Given multiple files (via fileset), set the error handling behavior
+ * @param flag true will throw build exception in case of failure (default)
+ */
+ public void setFailonerror(boolean flag) {
+ this.failOnError = flag;
+ }
+
+ /**
+ * Sets directory where output files will be generated
+ * @param dir output directory
+ */
+ public void setDestdir(File dir) {
+ this.dest = dir;
+ }
+
+ /**
+ * Adds a fileset that can consist of one or more files
+ * @param set Set of record definition files
+ */
+ public void addFileset(FileSet set) {
+ filesets.add(set);
+ }
+
+ /**
+ * Invoke the Hadoop record compiler on each record definition file
+ */
+ @Override
+ public void execute() throws BuildException {
+ if (src == null && filesets.size()==0) {
+ throw new BuildException("There must be a file attribute or a fileset child element");
+ }
+ if (src != null) {
+ doCompile(src);
+ }
+ Project myProject = getProject();
+ for (int i = 0; i < filesets.size(); i++) {
+ FileSet fs = filesets.get(i);
+ DirectoryScanner ds = fs.getDirectoryScanner(myProject);
+ File dir = fs.getDir(myProject);
+ String[] srcs = ds.getIncludedFiles();
+ for (int j = 0; j < srcs.length; j++) {
+ doCompile(new File(dir, srcs[j]));
+ }
+ }
+ }
+
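+  // Compiles one DDL file by assembling the equivalent command line
+  // (--language, --destdir, file) and delegating to Rcc.driver.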
+ private void doCompile(File file) throws BuildException {
+ String[] args = new String[5];
+ args[0] = "--language";
+ args[1] = this.language;
+ args[2] = "--destdir";
+ args[3] = this.dest.getPath();
+ args[4] = file.getPath();
+ int retVal = Rcc.driver(args);
+ if (retVal != 0 && failOnError) {
+ throw new BuildException("Hadoop record compiler returned error code "+retVal);
+ }
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java
new file mode 100644
index 00000000000..3af5910ccb4
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java
@@ -0,0 +1,219 @@
+/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * This exception is thrown when parse errors are encountered.
+ * You can explicitly create objects of this exception type by
+ * calling the method generateParseException in the generated
+ * parser.
+ *
+ * You can modify this class to customize your error reporting
+ * mechanisms so long as you retain the public fields.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class ParseException extends Exception {
+
+ /**
+ * This constructor is used by the method "generateParseException"
+ * in the generated parser. Calling this constructor generates
+ * a new object of this type with the fields "currentToken",
+ * "expectedTokenSequences", and "tokenImage" set. The boolean
+ * flag "specialConstructor" is also set to true to indicate that
+ * this constructor was used to create this object.
+ * This constructor calls its super class with the empty string
+ * to force the "toString" method of parent class "Throwable" to
+ * print the error message in the form:
+   * ParseException: <result of getMessage>
+ */
+ public ParseException(Token currentTokenVal,
+ int[][] expectedTokenSequencesVal,
+ String[] tokenImageVal
+ )
+ {
+ super("");
+ specialConstructor = true;
+ currentToken = currentTokenVal;
+ expectedTokenSequences = expectedTokenSequencesVal;
+ tokenImage = tokenImageVal;
+ }
+
+ /**
+ * The following constructors are for use by you for whatever
+ * purpose you can think of. Constructing the exception in this
+ * manner makes the exception behave in the normal way - i.e., as
+ * documented in the class "Throwable". The fields "errorToken",
+ * "expectedTokenSequences", and "tokenImage" do not contain
+ * relevant information. The JavaCC generated code does not use
+ * these constructors.
+ */
+
+ public ParseException() {
+ super();
+ specialConstructor = false;
+ }
+
+ public ParseException(String message) {
+ super(message);
+ specialConstructor = false;
+ }
+
+ /**
+ * This variable determines which constructor was used to create
+ * this object and thereby affects the semantics of the
+ * "getMessage" method (see below).
+ */
+ protected boolean specialConstructor;
+
+ /**
+ * This is the last token that has been consumed successfully. If
+ * this object has been created due to a parse error, the token
+   * following this token will (therefore) be the first error token.
+ */
+ public Token currentToken;
+
+ /**
+ * Each entry in this array is an array of integers. Each array
+ * of integers represents a sequence of tokens (by their ordinal
+ * values) that is expected at this point of the parse.
+ */
+ public int[][] expectedTokenSequences;
+
+ /**
+ * This is a reference to the "tokenImage" array of the generated
+ * parser within which the parse error occurred. This array is
+ * defined in the generated ...Constants interface.
+ */
+ public String[] tokenImage;
+
+ /**
+ * This method has the standard behavior when this object has been
+ * created using the standard constructors. Otherwise, it uses
+ * "currentToken" and "expectedTokenSequences" to generate a parse
+ * error message and returns it. If this object has been created
+ * due to a parse error, and you do not catch it (it gets thrown
+ * from the parser), then this method is called during the printing
+ * of the final stack trace, and hence the correct error message
+ * gets displayed.
+ */
+ @Override
+ public String getMessage() {
+ if (!specialConstructor) {
+ return super.getMessage();
+ }
+ StringBuffer expected = new StringBuffer();
+ int maxSize = 0;
+ for (int i = 0; i < expectedTokenSequences.length; i++) {
+ if (maxSize < expectedTokenSequences[i].length) {
+ maxSize = expectedTokenSequences[i].length;
+ }
+ for (int j = 0; j < expectedTokenSequences[i].length; j++) {
+ expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
+ }
+ if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
+ expected.append("...");
+ }
+ expected.append(eol).append(" ");
+ }
+ String retval = "Encountered \"";
+ Token tok = currentToken.next;
+ for (int i = 0; i < maxSize; i++) {
+ if (i != 0) retval += " ";
+ if (tok.kind == 0) {
+ retval += tokenImage[0];
+ break;
+ }
+ retval += add_escapes(tok.image);
+ tok = tok.next;
+ }
+ retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+ retval += "." + eol;
+ if (expectedTokenSequences.length == 1) {
+ retval += "Was expecting:" + eol + " ";
+ } else {
+ retval += "Was expecting one of:" + eol + " ";
+ }
+ retval += expected.toString();
+ return retval;
+ }
+
+ /**
+ * The end of line string for this machine.
+ */
+ protected String eol = System.getProperty("line.separator", "\n");
+
+ /**
+ * Used to convert raw characters to their escaped version
+   * when these raw versions cannot be used as part of an ASCII
+ * string literal.
+ */
+ protected String add_escapes(String str) {
+ StringBuffer retval = new StringBuffer();
+ char ch;
+ for (int i = 0; i < str.length(); i++) {
+ switch (str.charAt(i))
+ {
+ case 0 :
+ continue;
+ case '\b':
+ retval.append("\\b");
+ continue;
+ case '\t':
+ retval.append("\\t");
+ continue;
+ case '\n':
+ retval.append("\\n");
+ continue;
+ case '\f':
+ retval.append("\\f");
+ continue;
+ case '\r':
+ retval.append("\\r");
+ continue;
+ case '\"':
+ retval.append("\\\"");
+ continue;
+ case '\'':
+ retval.append("\\\'");
+ continue;
+ case '\\':
+ retval.append("\\\\");
+ continue;
+ default:
+ if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+ String s = "0000" + Integer.toString(ch, 16);
+ retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+ } else {
+ retval.append(ch);
+ }
+ continue;
+ }
+ }
+ return retval.toString();
+ }
+
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
new file mode 100644
index 00000000000..c4c74cd6516
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
@@ -0,0 +1,542 @@
+/* Generated By:JavaCC: Do not edit this line. Rcc.java */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.compiler.*;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Rcc implements RccConstants {
+ private static String language = "java";
+ private static String destDir = ".";
+  private static ArrayList<String> recFiles = new ArrayList<String>();
+  private static ArrayList<String> cmdargs = new ArrayList<String>();
+  private static JFile curFile;
+  private static Hashtable<String,JRecord> recTab;
+ private static String curDir = ".";
+ private static String curFileName;
+ private static String curModuleName;
+
+ public static void main(String[] args) {
+ System.exit(driver(args));
+ }
+
+ public static void usage() {
+ System.err.println("Usage: rcc --language [java|c++] ddl-files");
+ }
+
+ public static int driver(String[] args) {
+    for (int i=0; i<args.length; i++) {
+      if ("-l".equalsIgnoreCase(args[i]) ||
+          "--language".equalsIgnoreCase(args[i])) {
+        language = args[i+1].toLowerCase();
+        i++;
+      } else if ("-d".equalsIgnoreCase(args[i]) ||
+                 "--destdir".equalsIgnoreCase(args[i])) {
+        destDir = args[i+1];
+        i++;
+      } else if (args[i].startsWith("-")) {
+        String arg = args[i].substring(1);
+        if (arg.startsWith("-")) {
+          arg = arg.substring(1);
+        }
+        cmdargs.add(arg.toLowerCase());
+      } else {
+        recFiles.add(args[i]);
+      }
+    }
+    if (recFiles.size() == 0) {
+      usage();
+      return 1;
+    }
+    for (int i=0; i<recFiles.size(); i++) {
+      curFileName = recFiles.get(i);
+      File file = new File(curFileName);
+      try {
+        FileReader reader = new FileReader(file);
+        Rcc parser = new Rcc(reader);
+        try {
+          recTab = new Hashtable<String,JRecord>();
+          curFile = parser.Input();
+ } catch (ParseException e) {
+ System.err.println(e.toString());
+ return 1;
+ }
+ try {
+ reader.close();
+ } catch (IOException e) {
+ }
+ } catch (FileNotFoundException e) {
+ System.err.println("File " + recFiles.get(i) +
+ " Not found.");
+ return 1;
+ }
+ try {
+ int retCode = curFile.genCode(language, destDir, cmdargs);
+ if (retCode != 0) { return retCode; }
+ } catch (IOException e) {
+ System.err.println(e.toString());
+ return 1;
+ }
+ }
+ return 0;
+ }
+
+ final public JFile Input() throws ParseException {
+    ArrayList<JFile> ilist = new ArrayList<JFile>();
+    ArrayList<JRecord> rlist = new ArrayList<JRecord>();
+    JFile i;
+    ArrayList<JRecord> l;
+ label_1:
+ while (true) {
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case INCLUDE_TKN:
+ i = Include();
+ ilist.add(i);
+ break;
+ case MODULE_TKN:
+ l = Module();
+ rlist.addAll(l);
+ break;
+ default:
+ jj_la1[0] = jj_gen;
+ jj_consume_token(-1);
+ throw new ParseException();
+ }
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case MODULE_TKN:
+ case INCLUDE_TKN:
+ ;
+ break;
+ default:
+ jj_la1[1] = jj_gen;
+ break label_1;
+ }
+ }
+ jj_consume_token(0);
+ {if (true) return new JFile(curFileName, ilist, rlist);}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JFile Include() throws ParseException {
+ String fname;
+ Token t;
+ jj_consume_token(INCLUDE_TKN);
+ t = jj_consume_token(CSTRING_TKN);
+ JFile ret = null;
+ fname = t.image.replaceAll("^\"", "").replaceAll("\"$","");
+ File file = new File(curDir, fname);
+ String tmpDir = curDir;
+ String tmpFile = curFileName;
+ curDir = file.getParent();
+ curFileName = file.getName();
+ try {
+ FileReader reader = new FileReader(file);
+ Rcc parser = new Rcc(reader);
+ try {
+ ret = parser.Input();
+ System.out.println(fname + " Parsed Successfully");
+ } catch (ParseException e) {
+ System.out.println(e.toString());
+ System.exit(1);
+ }
+ try {
+ reader.close();
+ } catch (IOException e) {
+ }
+ } catch (FileNotFoundException e) {
+ System.out.println("File " + fname +
+ " Not found.");
+ System.exit(1);
+ }
+ curDir = tmpDir;
+ curFileName = tmpFile;
+ {if (true) return ret;}
+ throw new Error("Missing return statement in function");
+ }
+
+  final public ArrayList<JRecord> Module() throws ParseException {
+    String mName;
+    ArrayList<JRecord> rlist;
+ jj_consume_token(MODULE_TKN);
+ mName = ModuleName();
+ curModuleName = mName;
+ jj_consume_token(LBRACE_TKN);
+ rlist = RecordList();
+ jj_consume_token(RBRACE_TKN);
+ {if (true) return rlist;}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public String ModuleName() throws ParseException {
+ String name = "";
+ Token t;
+ t = jj_consume_token(IDENT_TKN);
+ name += t.image;
+ label_2:
+ while (true) {
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case DOT_TKN:
+ ;
+ break;
+ default:
+ jj_la1[2] = jj_gen;
+ break label_2;
+ }
+ jj_consume_token(DOT_TKN);
+ t = jj_consume_token(IDENT_TKN);
+ name += "." + t.image;
+ }
+ {if (true) return name;}
+ throw new Error("Missing return statement in function");
+ }
+
+  final public ArrayList<JRecord> RecordList() throws ParseException {
+    ArrayList<JRecord> rlist = new ArrayList<JRecord>();
+ JRecord r;
+ label_3:
+ while (true) {
+ r = Record();
+ rlist.add(r);
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case RECORD_TKN:
+ ;
+ break;
+ default:
+ jj_la1[3] = jj_gen;
+ break label_3;
+ }
+ }
+ {if (true) return rlist;}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JRecord Record() throws ParseException {
+ String rname;
+    ArrayList<JField<JType>> flist = new ArrayList<JField<JType>>();
+ Token t;
+ JField f;
+ jj_consume_token(RECORD_TKN);
+ t = jj_consume_token(IDENT_TKN);
+ rname = t.image;
+ jj_consume_token(LBRACE_TKN);
+ label_4:
+ while (true) {
+ f = Field();
+ flist.add(f);
+ jj_consume_token(SEMICOLON_TKN);
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case BYTE_TKN:
+ case BOOLEAN_TKN:
+ case INT_TKN:
+ case LONG_TKN:
+ case FLOAT_TKN:
+ case DOUBLE_TKN:
+ case USTRING_TKN:
+ case BUFFER_TKN:
+ case VECTOR_TKN:
+ case MAP_TKN:
+ case IDENT_TKN:
+ ;
+ break;
+ default:
+ jj_la1[4] = jj_gen;
+ break label_4;
+ }
+ }
+ jj_consume_token(RBRACE_TKN);
+ String fqn = curModuleName + "." + rname;
+ JRecord r = new JRecord(fqn, flist);
+ recTab.put(fqn, r);
+ {if (true) return r;}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JField Field() throws ParseException {
+ JType jt;
+ Token t;
+ jt = Type();
+ t = jj_consume_token(IDENT_TKN);
+ {if (true) return new JField(t.image, jt);}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JType Type() throws ParseException {
+ JType jt;
+ Token t;
+ String rname;
+ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+ case MAP_TKN:
+ jt = Map();
+ {if (true) return jt;}
+ break;
+ case VECTOR_TKN:
+ jt = Vector();
+ {if (true) return jt;}
+ break;
+ case BYTE_TKN:
+ jj_consume_token(BYTE_TKN);
+ {if (true) return new JByte();}
+ break;
+ case BOOLEAN_TKN:
+ jj_consume_token(BOOLEAN_TKN);
+ {if (true) return new JBoolean();}
+ break;
+ case INT_TKN:
+ jj_consume_token(INT_TKN);
+ {if (true) return new JInt();}
+ break;
+ case LONG_TKN:
+ jj_consume_token(LONG_TKN);
+ {if (true) return new JLong();}
+ break;
+ case FLOAT_TKN:
+ jj_consume_token(FLOAT_TKN);
+ {if (true) return new JFloat();}
+ break;
+ case DOUBLE_TKN:
+ jj_consume_token(DOUBLE_TKN);
+ {if (true) return new JDouble();}
+ break;
+ case USTRING_TKN:
+ jj_consume_token(USTRING_TKN);
+ {if (true) return new JString();}
+ break;
+ case BUFFER_TKN:
+ jj_consume_token(BUFFER_TKN);
+ {if (true) return new JBuffer();}
+ break;
+ case IDENT_TKN:
+ rname = ModuleName();
+ if (rname.indexOf('.', 0) < 0) {
+ rname = curModuleName + "." + rname;
+ }
+ JRecord r = recTab.get(rname);
+ if (r == null) {
+ System.out.println("Type " + rname + " not known. Exiting.");
+ System.exit(1);
+ }
+ {if (true) return r;}
+ break;
+ default:
+ jj_la1[5] = jj_gen;
+ jj_consume_token(-1);
+ throw new ParseException();
+ }
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JMap Map() throws ParseException {
+ JType jt1;
+ JType jt2;
+ jj_consume_token(MAP_TKN);
+ jj_consume_token(LT_TKN);
+ jt1 = Type();
+ jj_consume_token(COMMA_TKN);
+ jt2 = Type();
+ jj_consume_token(GT_TKN);
+ {if (true) return new JMap(jt1, jt2);}
+ throw new Error("Missing return statement in function");
+ }
+
+ final public JVector Vector() throws ParseException {
+ JType jt;
+ jj_consume_token(VECTOR_TKN);
+ jj_consume_token(LT_TKN);
+ jt = Type();
+ jj_consume_token(GT_TKN);
+ {if (true) return new JVector(jt);}
+ throw new Error("Missing return statement in function");
+ }
+
+ public RccTokenManager token_source;
+ SimpleCharStream jj_input_stream;
+ public Token token, jj_nt;
+ private int jj_ntk;
+ private int jj_gen;
+ final private int[] jj_la1 = new int[6];
+ static private int[] jj_la1_0;
+ static private int[] jj_la1_1;
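+  // JavaCC lookahead tables: jj_la1_0 and jj_la1_1 hold 32-bit token-kind
+  // bitsets (kinds 0-31 and 32 up) for each of the parser's six choice
+  // points; generateParseException() reads them to report expected tokens.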
+ static {
+ jj_la1_0();
+ jj_la1_1();
+ }
+ private static void jj_la1_0() {
+ jj_la1_0 = new int[] {0x2800, 0x2800, 0x40000000, 0x1000, 0xffc000, 0xffc000,};
+ }
+ private static void jj_la1_1() {
+ jj_la1_1 = new int[] {0x0, 0x0, 0x0, 0x0, 0x1, 0x1,};
+ }
+
+ public Rcc(java.io.InputStream stream) {
+ this(stream, null);
+ }
+ public Rcc(java.io.InputStream stream, String encoding) {
+ try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+ token_source = new RccTokenManager(jj_input_stream);
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ public void ReInit(java.io.InputStream stream) {
+ ReInit(stream, null);
+ }
+ public void ReInit(java.io.InputStream stream, String encoding) {
+ try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+ token_source.ReInit(jj_input_stream);
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ public Rcc(java.io.Reader stream) {
+ jj_input_stream = new SimpleCharStream(stream, 1, 1);
+ token_source = new RccTokenManager(jj_input_stream);
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ public void ReInit(java.io.Reader stream) {
+ jj_input_stream.ReInit(stream, 1, 1);
+ token_source.ReInit(jj_input_stream);
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ public Rcc(RccTokenManager tm) {
+ token_source = tm;
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ public void ReInit(RccTokenManager tm) {
+ token_source = tm;
+ token = new Token();
+ jj_ntk = -1;
+ jj_gen = 0;
+ for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+ }
+
+ final private Token jj_consume_token(int kind) throws ParseException {
+ Token oldToken;
+ if ((oldToken = token).next != null) token = token.next;
+ else token = token.next = token_source.getNextToken();
+ jj_ntk = -1;
+ if (token.kind == kind) {
+ jj_gen++;
+ return token;
+ }
+ token = oldToken;
+ jj_kind = kind;
+ throw generateParseException();
+ }
+
+ final public Token getNextToken() {
+ if (token.next != null) token = token.next;
+ else token = token.next = token_source.getNextToken();
+ jj_ntk = -1;
+ jj_gen++;
+ return token;
+ }
+
+ final public Token getToken(int index) {
+ Token t = token;
+ for (int i = 0; i < index; i++) {
+ if (t.next != null) t = t.next;
+ else t = t.next = token_source.getNextToken();
+ }
+ return t;
+ }
+
+ final private int jj_ntk() {
+ if ((jj_nt=token.next) == null)
+ return (jj_ntk = (token.next=token_source.getNextToken()).kind);
+ else
+ return (jj_ntk = jj_nt.kind);
+ }
+
+ private java.util.Vector jj_expentries = new java.util.Vector();
+ private int[] jj_expentry;
+ private int jj_kind = -1;
+
+ public ParseException generateParseException() {
+ jj_expentries.removeAllElements();
+ boolean[] la1tokens = new boolean[33];
+ for (int i = 0; i < 33; i++) {
+ la1tokens[i] = false;
+ }
+ if (jj_kind >= 0) {
+ la1tokens[jj_kind] = true;
+ jj_kind = -1;
+ }
+ for (int i = 0; i < 6; i++) {
+ if (jj_la1[i] == jj_gen) {
+ for (int j = 0; j < 32; j++) {
+          if ((jj_la1_0[i] & (1<<j)) != 0) {
+            la1tokens[j] = true;
+          }
+          if ((jj_la1_1[i] & (1<<j)) != 0) {
+            la1tokens[32+j] = true;
+          }
+        }
+      }
+    }
+    for (int i = 0; i < 33; i++) {
+      if (la1tokens[i]) {
+        jj_expentry = new int[1];
+        jj_expentry[0] = i;
+        jj_expentries.addElement(jj_expentry);
+      }
+    }
+    int[][] exptokseq = new int[jj_expentries.size()][];
+    for (int i = 0; i < jj_expentries.size(); i++) {
+      exptokseq[i] = (int[])jj_expentries.elementAt(i);
+    }
+    return new ParseException(token, exptokseq, tokenImage);
+  }
+
+  final public void enable_tracing() {
+  }
+
+  final public void disable_tracing() {
+  }
+
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccConstants.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccConstants.java
new file mode 100644
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccConstants.java
+/* Generated By:JavaCC: Do not edit this line. RccConstants.java */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface RccConstants {
+
+ int EOF = 0;
+ int MODULE_TKN = 11;
+ int RECORD_TKN = 12;
+ int INCLUDE_TKN = 13;
+ int BYTE_TKN = 14;
+ int BOOLEAN_TKN = 15;
+ int INT_TKN = 16;
+ int LONG_TKN = 17;
+ int FLOAT_TKN = 18;
+ int DOUBLE_TKN = 19;
+ int USTRING_TKN = 20;
+ int BUFFER_TKN = 21;
+ int VECTOR_TKN = 22;
+ int MAP_TKN = 23;
+ int LBRACE_TKN = 24;
+ int RBRACE_TKN = 25;
+ int LT_TKN = 26;
+ int GT_TKN = 27;
+ int SEMICOLON_TKN = 28;
+ int COMMA_TKN = 29;
+ int DOT_TKN = 30;
+ int CSTRING_TKN = 31;
+ int IDENT_TKN = 32;
+
+ int DEFAULT = 0;
+ int WithinOneLineComment = 1;
+ int WithinMultiLineComment = 2;
+
+ String[] tokenImage = {
+ "",
+ "\" \"",
+ "\"\\t\"",
+ "\"\\n\"",
+ "\"\\r\"",
+ "\"//\"",
+ "",
+ "",
+ "\"/*\"",
+ "\"*/\"",
+ "",
+ "\"module\"",
+ "\"class\"",
+ "\"include\"",
+ "\"byte\"",
+ "\"boolean\"",
+ "\"int\"",
+ "\"long\"",
+ "\"float\"",
+ "\"double\"",
+ "\"ustring\"",
+ "\"buffer\"",
+ "\"vector\"",
+ "\"map\"",
+ "\"{\"",
+ "\"}\"",
+ "\"<\"",
+ "\">\"",
+ "\";\"",
+ "\",\"",
+ "\".\"",
+ "",
+ "",
+ };
+
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java
new file mode 100644
index 00000000000..7488606fe96
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java
@@ -0,0 +1,833 @@
+/* Generated By:JavaCC: Do not edit this line. RccTokenManager.java */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class RccTokenManager implements RccConstants
+{
+ public java.io.PrintStream debugStream = System.out;
+ public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
+ private final int jjMoveStringLiteralDfa0_1()
+ {
+ return jjMoveNfa_1(0, 0);
+ }
+ private final void jjCheckNAdd(int state)
+ {
+ if (jjrounds[state] != jjround)
+ {
+ jjstateSet[jjnewStateCnt++] = state;
+ jjrounds[state] = jjround;
+ }
+ }
+ private final void jjAddStates(int start, int end)
+ {
+ do {
+ jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+ } while (start++ != end);
+ }
+ private final void jjCheckNAddTwoStates(int state1, int state2)
+ {
+ jjCheckNAdd(state1);
+ jjCheckNAdd(state2);
+ }
+ private final void jjCheckNAddStates(int start, int end)
+ {
+ do {
+ jjCheckNAdd(jjnextStates[start]);
+ } while (start++ != end);
+ }
+ private final void jjCheckNAddStates(int start)
+ {
+ jjCheckNAdd(jjnextStates[start]);
+ jjCheckNAdd(jjnextStates[start + 1]);
+ }
+ private final int jjMoveNfa_1(int startState, int curPos)
+ {
+ int[] nextStates;
+ int startsAt = 0;
+ jjnewStateCnt = 3;
+ int i = 1;
+ jjstateSet[0] = startState;
+ int j, kind = 0x7fffffff;
+ for (;;)
+ {
+ if (++jjround == 0x7fffffff)
+ ReInitRounds();
+ if (curChar < 64)
+ {
+ long l = 1L << curChar;
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ case 0:
+ if ((0x2400L & l) != 0L)
+ {
+ if (kind > 6)
+ kind = 6;
+ }
+ if (curChar == 13)
+ jjstateSet[jjnewStateCnt++] = 1;
+ break;
+ case 1:
+ if (curChar == 10 && kind > 6)
+ kind = 6;
+ break;
+ case 2:
+ if (curChar == 13)
+ jjstateSet[jjnewStateCnt++] = 1;
+ break;
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ else if (curChar < 128)
+ {
+ long l = 1L << (curChar & 077);
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ else
+ {
+ int i2 = (curChar & 0xff) >> 6;
+ long l2 = 1L << (curChar & 077);
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ if (kind != 0x7fffffff)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = curPos;
+ kind = 0x7fffffff;
+ }
+ ++curPos;
+ if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
+ return curPos;
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) { return curPos; }
+ }
+ }
+ private final int jjStopStringLiteralDfa_0(int pos, long active0)
+ {
+ switch (pos)
+ {
+ case 0:
+ if ((active0 & 0xfff800L) != 0L)
+ {
+ jjmatchedKind = 32;
+ return 4;
+ }
+ return -1;
+ case 1:
+ if ((active0 & 0xfff800L) != 0L)
+ {
+ jjmatchedKind = 32;
+ jjmatchedPos = 1;
+ return 4;
+ }
+ return -1;
+ case 2:
+ if ((active0 & 0x7ef800L) != 0L)
+ {
+ jjmatchedKind = 32;
+ jjmatchedPos = 2;
+ return 4;
+ }
+ if ((active0 & 0x810000L) != 0L)
+ return 4;
+ return -1;
+ case 3:
+ if ((active0 & 0x24000L) != 0L)
+ return 4;
+ if ((active0 & 0x7cb800L) != 0L)
+ {
+ jjmatchedKind = 32;
+ jjmatchedPos = 3;
+ return 4;
+ }
+ return -1;
+ case 4:
+ if ((active0 & 0x41000L) != 0L)
+ return 4;
+ if ((active0 & 0x78a800L) != 0L)
+ {
+ jjmatchedKind = 32;
+ jjmatchedPos = 4;
+ return 4;
+ }
+ return -1;
+ case 5:
+ if ((active0 & 0x680800L) != 0L)
+ return 4;
+ if ((active0 & 0x10a000L) != 0L)
+ {
+ jjmatchedKind = 32;
+ jjmatchedPos = 5;
+ return 4;
+ }
+ return -1;
+ default :
+ return -1;
+ }
+ }
+ private final int jjStartNfa_0(int pos, long active0)
+ {
+ return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
+ }
+ private final int jjStopAtPos(int pos, int kind)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = pos;
+ return pos + 1;
+ }
+ private final int jjStartNfaWithStates_0(int pos, int kind, int state)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = pos;
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) { return pos + 1; }
+ return jjMoveNfa_0(state, pos + 1);
+ }
+ private final int jjMoveStringLiteralDfa0_0()
+ {
+ switch(curChar)
+ {
+ case 44:
+ return jjStopAtPos(0, 29);
+ case 46:
+ return jjStopAtPos(0, 30);
+ case 47:
+ return jjMoveStringLiteralDfa1_0(0x120L);
+ case 59:
+ return jjStopAtPos(0, 28);
+ case 60:
+ return jjStopAtPos(0, 26);
+ case 62:
+ return jjStopAtPos(0, 27);
+ case 98:
+ return jjMoveStringLiteralDfa1_0(0x20c000L);
+ case 99:
+ return jjMoveStringLiteralDfa1_0(0x1000L);
+ case 100:
+ return jjMoveStringLiteralDfa1_0(0x80000L);
+ case 102:
+ return jjMoveStringLiteralDfa1_0(0x40000L);
+ case 105:
+ return jjMoveStringLiteralDfa1_0(0x12000L);
+ case 108:
+ return jjMoveStringLiteralDfa1_0(0x20000L);
+ case 109:
+ return jjMoveStringLiteralDfa1_0(0x800800L);
+ case 117:
+ return jjMoveStringLiteralDfa1_0(0x100000L);
+ case 118:
+ return jjMoveStringLiteralDfa1_0(0x400000L);
+ case 123:
+ return jjStopAtPos(0, 24);
+ case 125:
+ return jjStopAtPos(0, 25);
+ default :
+ return jjMoveNfa_0(0, 0);
+ }
+ }
+ private final int jjMoveStringLiteralDfa1_0(long active0)
+ {
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(0, active0);
+ return 1;
+ }
+ switch(curChar)
+ {
+ case 42:
+ if ((active0 & 0x100L) != 0L)
+ return jjStopAtPos(1, 8);
+ break;
+ case 47:
+ if ((active0 & 0x20L) != 0L)
+ return jjStopAtPos(1, 5);
+ break;
+ case 97:
+ return jjMoveStringLiteralDfa2_0(active0, 0x800000L);
+ case 101:
+ return jjMoveStringLiteralDfa2_0(active0, 0x400000L);
+ case 108:
+ return jjMoveStringLiteralDfa2_0(active0, 0x41000L);
+ case 110:
+ return jjMoveStringLiteralDfa2_0(active0, 0x12000L);
+ case 111:
+ return jjMoveStringLiteralDfa2_0(active0, 0xa8800L);
+ case 115:
+ return jjMoveStringLiteralDfa2_0(active0, 0x100000L);
+ case 117:
+ return jjMoveStringLiteralDfa2_0(active0, 0x200000L);
+ case 121:
+ return jjMoveStringLiteralDfa2_0(active0, 0x4000L);
+ default :
+ break;
+ }
+ return jjStartNfa_0(0, active0);
+ }
+ private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
+ {
+ if (((active0 &= old0)) == 0L)
+ return jjStartNfa_0(0, old0);
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(1, active0);
+ return 2;
+ }
+ switch(curChar)
+ {
+ case 97:
+ return jjMoveStringLiteralDfa3_0(active0, 0x1000L);
+ case 99:
+ return jjMoveStringLiteralDfa3_0(active0, 0x402000L);
+ case 100:
+ return jjMoveStringLiteralDfa3_0(active0, 0x800L);
+ case 102:
+ return jjMoveStringLiteralDfa3_0(active0, 0x200000L);
+ case 110:
+ return jjMoveStringLiteralDfa3_0(active0, 0x20000L);
+ case 111:
+ return jjMoveStringLiteralDfa3_0(active0, 0x48000L);
+ case 112:
+ if ((active0 & 0x800000L) != 0L)
+ return jjStartNfaWithStates_0(2, 23, 4);
+ break;
+ case 116:
+ if ((active0 & 0x10000L) != 0L)
+ return jjStartNfaWithStates_0(2, 16, 4);
+ return jjMoveStringLiteralDfa3_0(active0, 0x104000L);
+ case 117:
+ return jjMoveStringLiteralDfa3_0(active0, 0x80000L);
+ default :
+ break;
+ }
+ return jjStartNfa_0(1, active0);
+ }
+ private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
+ {
+ if (((active0 &= old0)) == 0L)
+ return jjStartNfa_0(1, old0);
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(2, active0);
+ return 3;
+ }
+ switch(curChar)
+ {
+ case 97:
+ return jjMoveStringLiteralDfa4_0(active0, 0x40000L);
+ case 98:
+ return jjMoveStringLiteralDfa4_0(active0, 0x80000L);
+ case 101:
+ if ((active0 & 0x4000L) != 0L)
+ return jjStartNfaWithStates_0(3, 14, 4);
+ break;
+ case 102:
+ return jjMoveStringLiteralDfa4_0(active0, 0x200000L);
+ case 103:
+ if ((active0 & 0x20000L) != 0L)
+ return jjStartNfaWithStates_0(3, 17, 4);
+ break;
+ case 108:
+ return jjMoveStringLiteralDfa4_0(active0, 0xa000L);
+ case 114:
+ return jjMoveStringLiteralDfa4_0(active0, 0x100000L);
+ case 115:
+ return jjMoveStringLiteralDfa4_0(active0, 0x1000L);
+ case 116:
+ return jjMoveStringLiteralDfa4_0(active0, 0x400000L);
+ case 117:
+ return jjMoveStringLiteralDfa4_0(active0, 0x800L);
+ default :
+ break;
+ }
+ return jjStartNfa_0(2, active0);
+ }
+ private final int jjMoveStringLiteralDfa4_0(long old0, long active0)
+ {
+ if (((active0 &= old0)) == 0L)
+ return jjStartNfa_0(2, old0);
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(3, active0);
+ return 4;
+ }
+ switch(curChar)
+ {
+ case 101:
+ return jjMoveStringLiteralDfa5_0(active0, 0x208000L);
+ case 105:
+ return jjMoveStringLiteralDfa5_0(active0, 0x100000L);
+ case 108:
+ return jjMoveStringLiteralDfa5_0(active0, 0x80800L);
+ case 111:
+ return jjMoveStringLiteralDfa5_0(active0, 0x400000L);
+ case 115:
+ if ((active0 & 0x1000L) != 0L)
+ return jjStartNfaWithStates_0(4, 12, 4);
+ break;
+ case 116:
+ if ((active0 & 0x40000L) != 0L)
+ return jjStartNfaWithStates_0(4, 18, 4);
+ break;
+ case 117:
+ return jjMoveStringLiteralDfa5_0(active0, 0x2000L);
+ default :
+ break;
+ }
+ return jjStartNfa_0(3, active0);
+ }
+ private final int jjMoveStringLiteralDfa5_0(long old0, long active0)
+ {
+ if (((active0 &= old0)) == 0L)
+ return jjStartNfa_0(3, old0);
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(4, active0);
+ return 5;
+ }
+ switch(curChar)
+ {
+ case 97:
+ return jjMoveStringLiteralDfa6_0(active0, 0x8000L);
+ case 100:
+ return jjMoveStringLiteralDfa6_0(active0, 0x2000L);
+ case 101:
+ if ((active0 & 0x800L) != 0L)
+ return jjStartNfaWithStates_0(5, 11, 4);
+ else if ((active0 & 0x80000L) != 0L)
+ return jjStartNfaWithStates_0(5, 19, 4);
+ break;
+ case 110:
+ return jjMoveStringLiteralDfa6_0(active0, 0x100000L);
+ case 114:
+ if ((active0 & 0x200000L) != 0L)
+ return jjStartNfaWithStates_0(5, 21, 4);
+ else if ((active0 & 0x400000L) != 0L)
+ return jjStartNfaWithStates_0(5, 22, 4);
+ break;
+ default :
+ break;
+ }
+ return jjStartNfa_0(4, active0);
+ }
+ private final int jjMoveStringLiteralDfa6_0(long old0, long active0)
+ {
+ if (((active0 &= old0)) == 0L)
+ return jjStartNfa_0(4, old0);
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ jjStopStringLiteralDfa_0(5, active0);
+ return 6;
+ }
+ switch(curChar)
+ {
+ case 101:
+ if ((active0 & 0x2000L) != 0L)
+ return jjStartNfaWithStates_0(6, 13, 4);
+ break;
+ case 103:
+ if ((active0 & 0x100000L) != 0L)
+ return jjStartNfaWithStates_0(6, 20, 4);
+ break;
+ case 110:
+ if ((active0 & 0x8000L) != 0L)
+ return jjStartNfaWithStates_0(6, 15, 4);
+ break;
+ default :
+ break;
+ }
+ return jjStartNfa_0(5, active0);
+ }
+ static final long[] jjbitVec0 = {
+ 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
+ };
+ private final int jjMoveNfa_0(int startState, int curPos)
+ {
+ int[] nextStates;
+ int startsAt = 0;
+ jjnewStateCnt = 5;
+ int i = 1;
+ jjstateSet[0] = startState;
+ int j, kind = 0x7fffffff;
+ for (;;)
+ {
+ if (++jjround == 0x7fffffff)
+ ReInitRounds();
+ if (curChar < 64)
+ {
+ long l = 1L << curChar;
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ case 0:
+ if (curChar == 34)
+ jjCheckNAdd(1);
+ break;
+ case 1:
+ if ((0xfffffffbffffffffL & l) != 0L)
+ jjCheckNAddTwoStates(1, 2);
+ break;
+ case 2:
+ if (curChar == 34 && kind > 31)
+ kind = 31;
+ break;
+ case 4:
+ if ((0x3ff000000000000L & l) == 0L)
+ break;
+ if (kind > 32)
+ kind = 32;
+ jjstateSet[jjnewStateCnt++] = 4;
+ break;
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ else if (curChar < 128)
+ {
+ long l = 1L << (curChar & 077);
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ case 0:
+ if ((0x7fffffe07fffffeL & l) == 0L)
+ break;
+ if (kind > 32)
+ kind = 32;
+ jjCheckNAdd(4);
+ break;
+ case 1:
+ jjAddStates(0, 1);
+ break;
+ case 4:
+ if ((0x7fffffe87fffffeL & l) == 0L)
+ break;
+ if (kind > 32)
+ kind = 32;
+ jjCheckNAdd(4);
+ break;
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ else
+ {
+ int i2 = (curChar & 0xff) >> 6;
+ long l2 = 1L << (curChar & 077);
+ MatchLoop: do
+ {
+ switch(jjstateSet[--i])
+ {
+ case 1:
+ if ((jjbitVec0[i2] & l2) != 0L)
+ jjAddStates(0, 1);
+ break;
+ default : break;
+ }
+ } while(i != startsAt);
+ }
+ if (kind != 0x7fffffff)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = curPos;
+ kind = 0x7fffffff;
+ }
+ ++curPos;
+ if ((i = jjnewStateCnt) == (startsAt = 5 - (jjnewStateCnt = startsAt)))
+ return curPos;
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) { return curPos; }
+ }
+ }
+ private final int jjMoveStringLiteralDfa0_2()
+ {
+ switch(curChar)
+ {
+ case 42:
+ return jjMoveStringLiteralDfa1_2(0x200L);
+ default :
+ return 1;
+ }
+ }
+ private final int jjMoveStringLiteralDfa1_2(long active0)
+ {
+ try { curChar = input_stream.readChar(); }
+ catch(java.io.IOException e) {
+ return 1;
+ }
+ switch(curChar)
+ {
+ case 47:
+ if ((active0 & 0x200L) != 0L)
+ return jjStopAtPos(1, 9);
+ break;
+ default :
+ return 2;
+ }
+ return 2;
+ }
+ static final int[] jjnextStates = {
+ 1, 2,
+ };
+ public static final String[] jjstrLiteralImages = {
+ "", null, null, null, null, null, null, null, null, null, null,
+ "\155\157\144\165\154\145", "\143\154\141\163\163", "\151\156\143\154\165\144\145", "\142\171\164\145",
+ "\142\157\157\154\145\141\156", "\151\156\164", "\154\157\156\147", "\146\154\157\141\164",
+ "\144\157\165\142\154\145", "\165\163\164\162\151\156\147", "\142\165\146\146\145\162",
+ "\166\145\143\164\157\162", "\155\141\160", "\173", "\175", "\74", "\76", "\73", "\54", "\56", null, null, };
+ public static final String[] lexStateNames = {
+ "DEFAULT",
+ "WithinOneLineComment",
+ "WithinMultiLineComment",
+ };
+ public static final int[] jjnewLexState = {
+ -1, -1, -1, -1, -1, 1, 0, -1, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1,
+ };
+ static final long[] jjtoToken = {
+ 0x1fffff801L,
+ };
+ static final long[] jjtoSkip = {
+ 0x37eL,
+ };
+ static final long[] jjtoSpecial = {
+ 0x360L,
+ };
+ static final long[] jjtoMore = {
+ 0x480L,
+ };
+ protected SimpleCharStream input_stream;
+ private final int[] jjrounds = new int[5];
+ private final int[] jjstateSet = new int[10];
+ StringBuffer image;
+ int jjimageLen;
+ int lengthOfMatch;
+ protected char curChar;
+ public RccTokenManager(SimpleCharStream stream){
+ if (SimpleCharStream.staticFlag)
+ throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
+ input_stream = stream;
+ }
+ public RccTokenManager(SimpleCharStream stream, int lexState){
+ this(stream);
+ SwitchTo(lexState);
+ }
+ public void ReInit(SimpleCharStream stream)
+ {
+ jjmatchedPos = jjnewStateCnt = 0;
+ curLexState = defaultLexState;
+ input_stream = stream;
+ ReInitRounds();
+ }
+ private final void ReInitRounds()
+ {
+ int i;
+ jjround = 0x80000001;
+ for (i = 5; i-- > 0;)
+ jjrounds[i] = 0x80000000;
+ }
+ public void ReInit(SimpleCharStream stream, int lexState)
+ {
+ ReInit(stream);
+ SwitchTo(lexState);
+ }
+ public void SwitchTo(int lexState)
+ {
+ if (lexState >= 3 || lexState < 0)
+ throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+ else
+ curLexState = lexState;
+ }
+
+ protected Token jjFillToken()
+ {
+ Token t = Token.newToken(jjmatchedKind);
+ t.kind = jjmatchedKind;
+ String im = jjstrLiteralImages[jjmatchedKind];
+ t.image = (im == null) ? input_stream.GetImage() : im;
+ t.beginLine = input_stream.getBeginLine();
+ t.beginColumn = input_stream.getBeginColumn();
+ t.endLine = input_stream.getEndLine();
+ t.endColumn = input_stream.getEndColumn();
+ return t;
+ }
+
+ int curLexState = 0;
+ int defaultLexState = 0;
+ int jjnewStateCnt;
+ int jjround;
+ int jjmatchedPos;
+ int jjmatchedKind;
+
+ public Token getNextToken()
+ {
+ int kind;
+ Token specialToken = null;
+ Token matchedToken;
+ int curPos = 0;
+
+ EOFLoop :
+ for (;;)
+ {
+ try
+ {
+ curChar = input_stream.BeginToken();
+ }
+ catch(java.io.IOException e)
+ {
+ jjmatchedKind = 0;
+ matchedToken = jjFillToken();
+ matchedToken.specialToken = specialToken;
+ return matchedToken;
+ }
+ image = null;
+ jjimageLen = 0;
+
+ for (;;)
+ {
+ switch(curLexState)
+ {
+ case 0:
+ try { input_stream.backup(0);
+ while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L)
+ curChar = input_stream.BeginToken();
+ }
+ catch (java.io.IOException e1) { continue EOFLoop; }
+ jjmatchedKind = 0x7fffffff;
+ jjmatchedPos = 0;
+ curPos = jjMoveStringLiteralDfa0_0();
+ break;
+ case 1:
+ jjmatchedKind = 0x7fffffff;
+ jjmatchedPos = 0;
+ curPos = jjMoveStringLiteralDfa0_1();
+ if (jjmatchedPos == 0 && jjmatchedKind > 7)
+ {
+ jjmatchedKind = 7;
+ }
+ break;
+ case 2:
+ jjmatchedKind = 0x7fffffff;
+ jjmatchedPos = 0;
+ curPos = jjMoveStringLiteralDfa0_2();
+ if (jjmatchedPos == 0 && jjmatchedKind > 10)
+ {
+ jjmatchedKind = 10;
+ }
+ break;
+ }
+ if (jjmatchedKind != 0x7fffffff)
+ {
+ if (jjmatchedPos + 1 < curPos)
+ input_stream.backup(curPos - jjmatchedPos - 1);
+ if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+ {
+ matchedToken = jjFillToken();
+ matchedToken.specialToken = specialToken;
+ if (jjnewLexState[jjmatchedKind] != -1)
+ curLexState = jjnewLexState[jjmatchedKind];
+ return matchedToken;
+ }
+ else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+ {
+ if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+ {
+ matchedToken = jjFillToken();
+ if (specialToken == null)
+ specialToken = matchedToken;
+ else
+ {
+ matchedToken.specialToken = specialToken;
+ specialToken = (specialToken.next = matchedToken);
+ }
+ SkipLexicalActions(matchedToken);
+ }
+ else
+ SkipLexicalActions(null);
+ if (jjnewLexState[jjmatchedKind] != -1)
+ curLexState = jjnewLexState[jjmatchedKind];
+ continue EOFLoop;
+ }
+ jjimageLen += jjmatchedPos + 1;
+ if (jjnewLexState[jjmatchedKind] != -1)
+ curLexState = jjnewLexState[jjmatchedKind];
+ curPos = 0;
+ jjmatchedKind = 0x7fffffff;
+ try {
+ curChar = input_stream.readChar();
+ continue;
+ }
+ catch (java.io.IOException e1) { }
+ }
+ int error_line = input_stream.getEndLine();
+ int error_column = input_stream.getEndColumn();
+ String error_after = null;
+ boolean EOFSeen = false;
+ try { input_stream.readChar(); input_stream.backup(1); }
+ catch (java.io.IOException e1) {
+ EOFSeen = true;
+ error_after = curPos <= 1 ? "" : input_stream.GetImage();
+ if (curChar == '\n' || curChar == '\r') {
+ error_line++;
+ error_column = 0;
+ }
+ else
+ error_column++;
+ }
+ if (!EOFSeen) {
+ input_stream.backup(1);
+ error_after = curPos <= 1 ? "" : input_stream.GetImage();
+ }
+ throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+ }
+ }
+ }
+
+ void SkipLexicalActions(Token matchedToken)
+ {
+ switch(jjmatchedKind)
+ {
+ default :
+ break;
+ }
+ }
+}
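Wired together with SimpleCharStream (added below), the token manager can be driven standalone. A minimal sketch, assuming it is compiled inside the org.apache.hadoop.record.compiler.generated package; the demo class name and DDL string are illustrative only:

    import java.io.StringReader;

    public class RccLexDemo {
      public static void main(String[] args) {
        SimpleCharStream in = new SimpleCharStream(
            new StringReader("module demo { class A { int x; } }"), 1, 1);
        RccTokenManager tm = new RccTokenManager(in);
        // getNextToken() yields the EOF token (kind 0) once input is exhausted.
        for (Token t = tm.getNextToken(); t.kind != RccConstants.EOF;
             t = tm.getNextToken()) {
          System.out.println(t.kind + "\t" + t.image);
        }
      }
    }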
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/SimpleCharStream.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/SimpleCharStream.java
new file mode 100644
index 00000000000..bdac777e72b
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/SimpleCharStream.java
@@ -0,0 +1,446 @@
+/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * An implementation of interface CharStream, where the stream is assumed to
+ * contain only ASCII characters (without unicode processing).
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class SimpleCharStream
+{
+ public static final boolean staticFlag = false;
+ int bufsize;
+ int available;
+ int tokenBegin;
+ public int bufpos = -1;
+ protected int bufline[];
+ protected int bufcolumn[];
+
+ protected int column = 0;
+ protected int line = 1;
+
+ protected boolean prevCharIsCR = false;
+ protected boolean prevCharIsLF = false;
+
+ protected java.io.Reader inputStream;
+
+ protected char[] buffer;
+ protected int maxNextCharInd = 0;
+ protected int inBuf = 0;
+ protected int tabSize = 8;
+
+ protected void setTabSize(int i) { tabSize = i; }
+ protected int getTabSize(int i) { return tabSize; }
+
+
+ protected void ExpandBuff(boolean wrapAround)
+ {
+ char[] newbuffer = new char[bufsize + 2048];
+ int newbufline[] = new int[bufsize + 2048];
+ int newbufcolumn[] = new int[bufsize + 2048];
+
+ try
+ {
+ if (wrapAround)
+ {
+ System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+ System.arraycopy(buffer, 0, newbuffer,
+ bufsize - tokenBegin, bufpos);
+ buffer = newbuffer;
+
+ System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+ System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
+ bufline = newbufline;
+
+ System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+ System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
+ bufcolumn = newbufcolumn;
+
+ maxNextCharInd = (bufpos += (bufsize - tokenBegin));
+ }
+ else
+ {
+ System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+ buffer = newbuffer;
+
+ System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+ bufline = newbufline;
+
+ System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+ bufcolumn = newbufcolumn;
+
+ maxNextCharInd = (bufpos -= tokenBegin);
+ }
+ }
+ catch (Throwable t)
+ {
+ throw new Error(t.getMessage());
+ }
+
+
+ bufsize += 2048;
+ available = bufsize;
+ tokenBegin = 0;
+ }
+
+ protected void FillBuff() throws java.io.IOException
+ {
+ if (maxNextCharInd == available)
+ {
+ if (available == bufsize)
+ {
+ if (tokenBegin > 2048)
+ {
+ bufpos = maxNextCharInd = 0;
+ available = tokenBegin;
+ }
+ else if (tokenBegin < 0)
+ bufpos = maxNextCharInd = 0;
+ else
+ ExpandBuff(false);
+ }
+ else if (available > tokenBegin)
+ available = bufsize;
+ else if ((tokenBegin - available) < 2048)
+ ExpandBuff(true);
+ else
+ available = tokenBegin;
+ }
+
+ int i;
+ try {
+ if ((i = inputStream.read(buffer, maxNextCharInd,
+ available - maxNextCharInd)) == -1)
+ {
+ inputStream.close();
+ throw new java.io.IOException();
+ }
+ else
+ maxNextCharInd += i;
+ return;
+ }
+ catch(java.io.IOException e) {
+ --bufpos;
+ backup(0);
+ if (tokenBegin == -1)
+ tokenBegin = bufpos;
+ throw e;
+ }
+ }
+
+ public char BeginToken() throws java.io.IOException
+ {
+ tokenBegin = -1;
+ char c = readChar();
+ tokenBegin = bufpos;
+
+ return c;
+ }
+
+ protected void UpdateLineColumn(char c)
+ {
+ column++;
+
+ if (prevCharIsLF)
+ {
+ prevCharIsLF = false;
+ line += (column = 1);
+ }
+ else if (prevCharIsCR)
+ {
+ prevCharIsCR = false;
+ if (c == '\n')
+ {
+ prevCharIsLF = true;
+ }
+ else
+ line += (column = 1);
+ }
+
+ switch (c)
+ {
+ case '\r' :
+ prevCharIsCR = true;
+ break;
+ case '\n' :
+ prevCharIsLF = true;
+ break;
+ case '\t' :
+ column--;
+ column += (tabSize - (column % tabSize));
+ break;
+ default :
+ break;
+ }
+
+ bufline[bufpos] = line;
+ bufcolumn[bufpos] = column;
+ }
+
+ public char readChar() throws java.io.IOException
+ {
+ if (inBuf > 0)
+ {
+ --inBuf;
+
+ if (++bufpos == bufsize)
+ bufpos = 0;
+
+ return buffer[bufpos];
+ }
+
+ if (++bufpos >= maxNextCharInd)
+ FillBuff();
+
+ char c = buffer[bufpos];
+
+ UpdateLineColumn(c);
+ return (c);
+ }
+
+ public int getEndColumn() {
+ return bufcolumn[bufpos];
+ }
+
+ public int getEndLine() {
+ return bufline[bufpos];
+ }
+
+ public int getBeginColumn() {
+ return bufcolumn[tokenBegin];
+ }
+
+ public int getBeginLine() {
+ return bufline[tokenBegin];
+ }
+
+ public void backup(int amount) {
+
+ inBuf += amount;
+ if ((bufpos -= amount) < 0)
+ bufpos += bufsize;
+ }
+
+ public SimpleCharStream(java.io.Reader dstream, int startline,
+ int startcolumn, int buffersize)
+ {
+ inputStream = dstream;
+ line = startline;
+ column = startcolumn - 1;
+
+ available = bufsize = buffersize;
+ buffer = new char[buffersize];
+ bufline = new int[buffersize];
+ bufcolumn = new int[buffersize];
+ }
+
+ public SimpleCharStream(java.io.Reader dstream, int startline,
+ int startcolumn)
+ {
+ this(dstream, startline, startcolumn, 4096);
+ }
+
+ public SimpleCharStream(java.io.Reader dstream)
+ {
+ this(dstream, 1, 1, 4096);
+ }
+ public void ReInit(java.io.Reader dstream, int startline,
+ int startcolumn, int buffersize)
+ {
+ inputStream = dstream;
+ line = startline;
+ column = startcolumn - 1;
+
+ if (buffer == null || buffersize != buffer.length)
+ {
+ available = bufsize = buffersize;
+ buffer = new char[buffersize];
+ bufline = new int[buffersize];
+ bufcolumn = new int[buffersize];
+ }
+ prevCharIsLF = prevCharIsCR = false;
+ tokenBegin = inBuf = maxNextCharInd = 0;
+ bufpos = -1;
+ }
+
+ public void ReInit(java.io.Reader dstream, int startline,
+ int startcolumn)
+ {
+ ReInit(dstream, startline, startcolumn, 4096);
+ }
+
+ public void ReInit(java.io.Reader dstream)
+ {
+ ReInit(dstream, 1, 1, 4096);
+ }
+ public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
+ int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+ {
+ this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+ }
+
+ public SimpleCharStream(java.io.InputStream dstream, int startline,
+ int startcolumn, int buffersize)
+ {
+ this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+ }
+
+ public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
+ int startcolumn) throws java.io.UnsupportedEncodingException
+ {
+ this(dstream, encoding, startline, startcolumn, 4096);
+ }
+
+ public SimpleCharStream(java.io.InputStream dstream, int startline,
+ int startcolumn)
+ {
+ this(dstream, startline, startcolumn, 4096);
+ }
+
+ public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+ {
+ this(dstream, encoding, 1, 1, 4096);
+ }
+
+ public SimpleCharStream(java.io.InputStream dstream)
+ {
+ this(dstream, 1, 1, 4096);
+ }
+
+ public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+ int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+ {
+ ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+ }
+
+ public void ReInit(java.io.InputStream dstream, int startline,
+ int startcolumn, int buffersize)
+ {
+ ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+ }
+
+ public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+ {
+ ReInit(dstream, encoding, 1, 1, 4096);
+ }
+
+ public void ReInit(java.io.InputStream dstream)
+ {
+ ReInit(dstream, 1, 1, 4096);
+ }
+ public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+ int startcolumn) throws java.io.UnsupportedEncodingException
+ {
+ ReInit(dstream, encoding, startline, startcolumn, 4096);
+ }
+ public void ReInit(java.io.InputStream dstream, int startline,
+ int startcolumn)
+ {
+ ReInit(dstream, startline, startcolumn, 4096);
+ }
+ public String GetImage()
+ {
+ if (bufpos >= tokenBegin)
+ return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
+ else
+ return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+ new String(buffer, 0, bufpos + 1);
+ }
+
+ public char[] GetSuffix(int len)
+ {
+ char[] ret = new char[len];
+
+ if ((bufpos + 1) >= len)
+ System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
+ else
+ {
+ System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
+ len - bufpos - 1);
+ System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
+ }
+
+ return ret;
+ }
+
+ public void Done()
+ {
+ buffer = null;
+ bufline = null;
+ bufcolumn = null;
+ }
+
+ /**
+ * Method to adjust line and column numbers for the start of a token.
+ */
+ public void adjustBeginLineColumn(int newLine, int newCol)
+ {
+ int start = tokenBegin;
+ int len;
+
+ if (bufpos >= tokenBegin)
+ {
+ len = bufpos - tokenBegin + inBuf + 1;
+ }
+ else
+ {
+ len = bufsize - tokenBegin + bufpos + 1 + inBuf;
+ }
+
+ int i = 0, j = 0, k = 0;
+ int nextColDiff = 0, columnDiff = 0;
+
+ while (i < len &&
+ bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
+ {
+ bufline[j] = newLine;
+ nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
+ bufcolumn[j] = newCol + columnDiff;
+ columnDiff = nextColDiff;
+ i++;
+ }
+
+ if (i < len)
+ {
+ bufline[j] = newLine++;
+ bufcolumn[j] = newCol + columnDiff;
+
+ while (i++ < len)
+ {
+ if (bufline[j = start % bufsize] != bufline[++start % bufsize])
+ bufline[j] = newLine++;
+ else
+ bufline[j] = newLine;
+ }
+ }
+
+ line = bufline[j];
+ column = bufcolumn[j];
+ }
+
+}
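The buffer bookkeeping above is easiest to see in motion: BeginToken() pins tokenBegin, backup() pushes characters back via inBuf, and GetImage() recovers everything between the pin and bufpos. A short sketch (illustrative only; readChar() and BeginToken() throw java.io.IOException, so call from a method that declares it):

    SimpleCharStream scs = new SimpleCharStream(new java.io.StringReader("abc"));
    char a = scs.BeginToken();   // 'a'; tokenBegin now points here
    char b = scs.readChar();     // 'b'
    scs.backup(1);               // push 'b' back; served from inBuf next time
    char again = scs.readChar(); // 'b' once more
    String img = scs.GetImage(); // "ab", from tokenBegin through bufpos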
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java
new file mode 100644
index 00000000000..1396bf899b5
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java
@@ -0,0 +1,107 @@
+/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Describes the input token stream.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Token {
+
+ /**
+ * An integer that describes the kind of this token. This numbering
+ * system is determined by JavaCCParser, and a table of these numbers is
+ * stored in the file ...Constants.java.
+ */
+ public int kind;
+
+ /**
+ * beginLine and beginColumn describe the position of the first character
+ * of this token; endLine and endColumn describe the position of the
+ * last character of this token.
+ */
+ public int beginLine, beginColumn, endLine, endColumn;
+
+ /**
+ * The string image of the token.
+ */
+ public String image;
+
+ /**
+ * A reference to the next regular (non-special) token from the input
+ * stream. If this is the last token from the input stream, or if the
+ * token manager has not read tokens beyond this one, this field is
+ * set to null. This is true only if this token is also a regular
+ * token. Otherwise, see below for a description of the contents of
+ * this field.
+ */
+ public Token next;
+
+ /**
+ * This field is used to access special tokens that occur prior to this
+ * token, but after the immediately preceding regular (non-special) token.
+ * If there are no such special tokens, this field is set to null.
+ * When there are more than one such special token, this field refers
+ * to the last of these special tokens, which in turn refers to the next
+ * previous special token through its specialToken field, and so on
+ * until the first special token (whose specialToken field is null).
+ * The next fields of special tokens refer to other special tokens that
+ * immediately follow it (without an intervening regular token). If there
+ * is no such token, this field is null.
+ */
+ public Token specialToken;
+
+ /**
+ * Returns the image.
+ */
+ @Override
+ public String toString()
+ {
+ return image;
+ }
+
+ /**
+ * Returns a new Token object, by default. However, if you want, you
+ * can create and return subclass objects based on the value of ofKind.
+ * Simply add the cases to the switch for all those special cases.
+ * For example, if you have a subclass of Token called IDToken that
+ * you want to create if ofKind is ID, simply add something like:
+ *
+ * case MyParserConstants.ID : return new IDToken();
+ *
+ * to the following switch statement. Then you can cast matchedToken
+ * variable to the appropriate type and use it in your lexical actions.
+ */
+ public static final Token newToken(int ofKind)
+ {
+ switch(ofKind)
+ {
+ default : return new Token();
+ }
+ }
+
+}
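Because comments are SPECIAL_TOKENs in this grammar, they never reach the parser directly; they hang off the next regular token through specialToken, most recent first. A sketch of recovering them, where tm is assumed to be an RccTokenManager over some input:

    Token t = tm.getNextToken();
    // Walk backwards through any comments skipped just before t.
    for (Token s = t.specialToken; s != null; s = s.specialToken) {
      System.out.println("before '" + t.image + "': " + s.image);
    }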
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java
new file mode 100644
index 00000000000..b6da7dadcd8
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java
@@ -0,0 +1,161 @@
+/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class TokenMgrError extends Error
+{
+ /*
+ * Ordinals for various reasons why an Error of this type can be thrown.
+ */
+
+ /**
+ * Lexical error occurred.
+ */
+ static final int LEXICAL_ERROR = 0;
+
+ /**
+ * An attempt was made to create a second instance of a static token manager.
+ */
+ static final int STATIC_LEXER_ERROR = 1;
+
+ /**
+ * Tried to change to an invalid lexical state.
+ */
+ static final int INVALID_LEXICAL_STATE = 2;
+
+ /**
+ * Detected (and bailed out of) an infinite loop in the token manager.
+ */
+ static final int LOOP_DETECTED = 3;
+
+ /**
+ * Indicates the reason why the exception is thrown. It will have
+ * one of the above 4 values.
+ */
+ int errorCode;
+
+ /**
+ * Replaces unprintable characters by their escaped (or unicode escaped)
+ * equivalents in the given string
+ */
+ protected static final String addEscapes(String str) {
+ StringBuffer retval = new StringBuffer();
+ char ch;
+ for (int i = 0; i < str.length(); i++) {
+ switch (str.charAt(i))
+ {
+ case 0 :
+ continue;
+ case '\b':
+ retval.append("\\b");
+ continue;
+ case '\t':
+ retval.append("\\t");
+ continue;
+ case '\n':
+ retval.append("\\n");
+ continue;
+ case '\f':
+ retval.append("\\f");
+ continue;
+ case '\r':
+ retval.append("\\r");
+ continue;
+ case '\"':
+ retval.append("\\\"");
+ continue;
+ case '\'':
+ retval.append("\\\'");
+ continue;
+ case '\\':
+ retval.append("\\\\");
+ continue;
+ default:
+ if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+ String s = "0000" + Integer.toString(ch, 16);
+ retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+ } else {
+ retval.append(ch);
+ }
+ continue;
+ }
+ }
+ return retval.toString();
+ }
+
+ /**
+ * Returns a detailed message for the Error when it is thrown by the
+ * token manager to indicate a lexical error.
+ * Parameters :
+ * EOFSeen : indicates if EOF caused the lexical error
+ * curLexState : lexical state in which this error occurred
+ * errorLine : line number when the error occurred
+ * errorColumn : column number when the error occurred
+ * errorAfter : prefix that was seen before this error occurred
+ * curchar : the offending character
+ * Note: You can customize the lexical error message by modifying this method.
+ */
+ protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
+ return("Lexical error at line " +
+ errorLine + ", column " +
+ errorColumn + ". Encountered: " +
+ (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
+ "after : \"" + addEscapes(errorAfter) + "\"");
+ }
+
+ /**
+ * You can also modify the body of this method to customize your error messages.
+ * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
+ * of end-users' concern, so you can return something like:
+ *
+ * "Internal Error : Please file a bug report .... "
+ *
+ * from this method for such cases in the release version of your parser.
+ */
+ @Override
+ public String getMessage() {
+ return super.getMessage();
+ }
+
+ /*
+ * Constructors of various flavors follow.
+ */
+
+ public TokenMgrError() {
+ }
+
+ public TokenMgrError(String message, int reason) {
+ super(message);
+ errorCode = reason;
+ }
+
+ public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
+ this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
+ }
+}
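TokenMgrError extends Error rather than Exception, so a caller that wants to survive malformed input must catch it explicitly; the message text is formatted by LexicalError() above. A hedged sketch:

    try {
      Token t = tm.getNextToken(); // tm: a token manager over untrusted input
    } catch (TokenMgrError e) {
      // e.g. "Lexical error at line 1, column 5. Encountered: ..."
      System.err.println(e.getMessage());
    }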
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/package.html b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/package.html
new file mode 100644
index 00000000000..d83fcf39494
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/package.html
@@ -0,0 +1,35 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<head>
+  <title>Hadoop Record Compiler: Parser</title>
+</head>
+
+<body>
+  (DEPRECATED) This package contains code generated by JavaCC from the
+  Hadoop record syntax file rcc.jj. For details about the
+  record file syntax please @see org.apache.hadoop.record.
+
+  <p>
+  DEPRECATED: Replaced by Avro.
+  </p>
+</body>
+
+</html>
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/rcc.jj b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/rcc.jj
new file mode 100644
index 00000000000..4eeae3e47db
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/generated/rcc.jj
@@ -0,0 +1,384 @@
+options {
+STATIC=false;
+}
+
+PARSER_BEGIN(Rcc)
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.record.compiler.*;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+public class Rcc {
+ private static String language = "java";
+ private static String destDir = ".";
+ private static ArrayList<String> recFiles = new ArrayList<String>();
+ private static ArrayList<String> cmdargs = new ArrayList<String>();
+ private static JFile curFile;
+ private static Hashtable<String, JRecord> recTab;
+ private static String curDir = ".";
+ private static String curFileName;
+ private static String curModuleName;
+
+ public static void main(String[] args) {
+ System.exit(driver(args));
+ }
+
+ public static void usage() {
+ System.err.println("Usage: rcc --language [java|c++] ddl-files");
+ }
+
+ public static int driver(String[] args) {
+ for (int i=0; i<args.length; i++) {
+ if ("-l".equalsIgnoreCase(args[i]) ||
+ "--language".equalsIgnoreCase(args[i])) {
+ language = args[i+1].toLowerCase();
+ i++;
+ } else if ("-d".equalsIgnoreCase(args[i]) ||
+ "--destdir".equalsIgnoreCase(args[i])) {
+ destDir = args[i+1];
+ i++;
+ } else if (args[i].startsWith("-")) {
+ String arg = args[i].substring(1);
+ if (arg.startsWith("-")) {
+ arg = arg.substring(1);
+ }
+ cmdargs.add(arg.toLowerCase());
+ } else {
+ recFiles.add(args[i]);
+ }
+ }
+ if (recFiles.size() == 0) {
+ usage();
+ return 1;
+ }
+ for (int i=0; i<recFiles.size(); i++) {
+ curFileName = recFiles.get(i);
+ File file = new File(curFileName);
+ try {
+ FileReader reader = new FileReader(file);
+ Rcc parser = new Rcc(reader);
+ try {
+ recTab = new Hashtable<String, JRecord>();
+ curFile = parser.Input();
+ } catch (ParseException e) {
+ System.err.println(e.toString());
+ return 1;
+ }
+ try {
+ reader.close();
+ } catch (IOException e) {
+ }
+ } catch (FileNotFoundException e) {
+ System.err.println("File " + (String) recFiles.get(i) +
+ " Not found.");
+ return 1;
+ }
+ try {
+ int retCode = curFile.genCode(language, destDir, cmdargs);
+ if (retCode != 0) { return retCode; }
+ } catch (IOException e) {
+ System.err.println(e.toString());
+ return 1;
+ }
+ }
+ return 0;
+ }
+}
+
+PARSER_END(Rcc)
+
+SKIP :
+{
+ " "
+| "\t"
+| "\n"
+| "\r"
+}
+
+SPECIAL_TOKEN :
+{
+ "//" : WithinOneLineComment
+}
+
+<WithinOneLineComment> SPECIAL_TOKEN :
+{
+ <("\n" | "\r" | "\r\n" )> : DEFAULT
+}
+
+<WithinOneLineComment> MORE :
+{
+ <~[]>
+}
+
+SPECIAL_TOKEN :
+{
+ "/*" : WithinMultiLineComment
+}
+
+<WithinMultiLineComment> SPECIAL_TOKEN :
+{
+ "*/" : DEFAULT
+}
+
+<WithinMultiLineComment> MORE :
+{
+ <~[]>
+}
+
+TOKEN :
+{
+ <MODULE_TKN: "module">
+| <RECORD_TKN: "class">
+| <INCLUDE_TKN: "include">
+| <BYTE_TKN: "byte">
+| <BOOLEAN_TKN: "boolean">
+| <INT_TKN: "int">
+| <LONG_TKN: "long">
+| <FLOAT_TKN: "float">
+| <DOUBLE_TKN: "double">
+| <USTRING_TKN: "ustring">
+| <BUFFER_TKN: "buffer">
+| <VECTOR_TKN: "vector">
+| <MAP_TKN: "map">
+| <LBRACE_TKN: "{">
+| <RBRACE_TKN: "}">
+| <LT_TKN: "<">
+| <GT_TKN: ">">
+| <SEMICOLON_TKN: ";">
+| <COMMA_TKN: ",">
+| <DOT_TKN: ".">
+| <CSTRING_TKN: "\"" (~["\""])* "\"">
+| <IDENT_TKN: ["A"-"Z","a"-"z"] (["a"-"z","A"-"Z","0"-"9","_"])*>
+}
+
+JFile Input() :
+{
+ ArrayList<JFile> ilist = new ArrayList<JFile>();
+ ArrayList<JRecord> rlist = new ArrayList<JRecord>();
+ JFile i;
+ ArrayList<JRecord> l;
+}
+{
+ (
+ i = Include()
+ { ilist.add(i); }
+ | l = Module()
+ { rlist.addAll(l); }
+ )+
+ <EOF>
+ { return new JFile(curFileName, ilist, rlist); }
+}
+
+JFile Include() :
+{
+ String fname;
+ Token t;
+}
+{
+ <INCLUDE_TKN>
+ t = <CSTRING_TKN>
+ {
+ JFile ret = null;
+ fname = t.image.replaceAll("^\"", "").replaceAll("\"$","");
+ File file = new File(curDir, fname);
+ String tmpDir = curDir;
+ String tmpFile = curFileName;
+ curDir = file.getParent();
+ curFileName = file.getName();
+ try {
+ FileReader reader = new FileReader(file);
+ Rcc parser = new Rcc(reader);
+ try {
+ ret = parser.Input();
+ System.out.println(fname + " Parsed Successfully");
+ } catch (ParseException e) {
+ System.out.println(e.toString());
+ System.exit(1);
+ }
+ try {
+ reader.close();
+ } catch (IOException e) {
+ }
+ } catch (FileNotFoundException e) {
+ System.out.println("File " + fname +
+ " Not found.");
+ System.exit(1);
+ }
+ curDir = tmpDir;
+ curFileName = tmpFile;
+ return ret;
+ }
+}
+
+ArrayList<JRecord> Module() :
+{
+ String mName;
+ ArrayList<JRecord> rlist;
+}
+{
+ <MODULE_TKN>
+ mName = ModuleName()
+ { curModuleName = mName; }
+ <LBRACE_TKN>
+ rlist = RecordList()
+ <RBRACE_TKN>
+ { return rlist; }
+}
+
+String ModuleName() :
+{
+ String name = "";
+ Token t;
+}
+{
+ t = <IDENT_TKN>
+ { name += t.image; }
+ (
+ <DOT_TKN>
+ t = <IDENT_TKN>
+ { name += "." + t.image; }
+ )*
+ { return name; }
+}
+
+ArrayList<JRecord> RecordList() :
+{
+ ArrayList<JRecord> rlist = new ArrayList<JRecord>();
+ JRecord r;
+}
+{
+ (
+ r = Record()
+ { rlist.add(r); }
+ )+
+ { return rlist; }
+}
+
+JRecord Record() :
+{
+ String rname;
+ ArrayList<JField<JType>> flist = new ArrayList<JField<JType>>();
+ Token t;
+ JField<JType> f;
+}
+{
+ <RECORD_TKN>
+ t = <IDENT_TKN>
+ { rname = t.image; }
+ <LBRACE_TKN>
+ (
+ f = Field()
+ { flist.add(f); }
+ <SEMICOLON_TKN>
+ )+
+ <RBRACE_TKN>
+ {
+ String fqn = curModuleName + "." + rname;
+ JRecord r = new JRecord(fqn, flist);
+ recTab.put(fqn, r);
+ return r;
+ }
+}
+
+JField<JType> Field() :
+{
+ JType jt;
+ Token t;
+}
+{
+ jt = Type()
+ t = <IDENT_TKN>
+ { return new JField<JType>(t.image, jt); }
+}
+
+JType Type() :
+{
+ JType jt;
+ Token t;
+ String rname;
+}
+{
+ jt = Map()
+ { return jt; }
+| jt = Vector()
+ { return jt; }
+| <BYTE_TKN>
+ { return new JByte(); }
+| <BOOLEAN_TKN>
+ { return new JBoolean(); }
+| <INT_TKN>
+ { return new JInt(); }
+| <LONG_TKN>
+ { return new JLong(); }
+| <FLOAT_TKN>
+ { return new JFloat(); }
+| <DOUBLE_TKN>
+ { return new JDouble(); }
+| <USTRING_TKN>
+ { return new JString(); }
+| <BUFFER_TKN>
+ { return new JBuffer(); }
+| rname = ModuleName()
+ {
+ if (rname.indexOf('.', 0) < 0) {
+ rname = curModuleName + "." + rname;
+ }
+ JRecord r = recTab.get(rname);
+ if (r == null) {
+ System.out.println("Type " + rname + " not known. Exiting.");
+ System.exit(1);
+ }
+ return r;
+ }
+}
+
+JMap Map() :
+{
+ JType jt1;
+ JType jt2;
+}
+{
+ <MAP_TKN>
+ <LT_TKN>
+ jt1 = Type()
+ <COMMA_TKN>
+ jt2 = Type()
+ <GT_TKN>
+ { return new JMap(jt1, jt2); }
+}
+
+JVector Vector() :
+{
+ JType jt;
+}
+{
+ <VECTOR_TKN>
+ <LT_TKN>
+ jt = Type()
+ <GT_TKN>
+ { return new JVector(jt); }
+}
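For a sense of what this grammar accepts, here is an illustrative DDL file (the module, record, and field names are examples, not part of the patch). Each class becomes a generated record, and vector/map take angle-bracketed type parameters per the Vector() and Map() productions above:

    module links {
      class Link {
        ustring url;
        int refCount;
        vector<ustring> anchors;
        map<ustring, buffer> meta;
      }
    }

Fed through the driver, e.g. Rcc.driver(new String[] {"--language", "java", "links.jr"}) with a hypothetical file name, this yields one generated class per record.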
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/package.html b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/package.html
new file mode 100644
index 00000000000..97604e824a8
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/compiler/package.html
@@ -0,0 +1,37 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<head>
+  <title>Hadoop Record Compiler</title>
+</head>
+
+<body>
+  (DEPRECATED) This package contains classes needed for code generation
+  from the hadoop record compiler. CppGenerator and JavaGenerator
+  are the main entry points from the parser. There are classes
+  corresponding to every primitive type and compound type
+  included in Hadoop record I/O syntax.
+
+  <p>
+  DEPRECATED: Replaced by Avro.
+  </p>
+</body>
+
+</html>
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java
new file mode 100644
index 00000000000..32436abf829
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Represents the type information for a field, which is made up of its
+ * ID (name) and its type (a TypeID object).
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class FieldTypeInfo
+{
+
+ private String fieldID;
+ private TypeID typeID;
+
+ /**
+ * Construct a FieldTypeInfo with the given field name and type
+ */
+ FieldTypeInfo(String fieldID, TypeID typeID) {
+ this.fieldID = fieldID;
+ this.typeID = typeID;
+ }
+
+ /**
+ * get the field's TypeID object
+ */
+ public TypeID getTypeID() {
+ return typeID;
+ }
+
+ /**
+ * get the field's id (name)
+ */
+ public String getFieldID() {
+ return fieldID;
+ }
+
+ void write(RecordOutput rout, String tag) throws IOException {
+ rout.writeString(fieldID, tag);
+ typeID.write(rout, tag);
+ }
+
+ /**
+ * Two FieldTypeInfos are equal if each of their fields matches
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (!(o instanceof FieldTypeInfo))
+ return false;
+ FieldTypeInfo fti = (FieldTypeInfo) o;
+ // first check if fieldID matches
+ if (!this.fieldID.equals(fti.fieldID)) {
+ return false;
+ }
+ // now see if typeID matches
+ return (this.typeID.equals(fti.typeID));
+ }
+
+ /**
+ * We use a basic hashcode implementation, since this class will likely not
+ * be used as a hashmap key
+ */
+ @Override
+ public int hashCode() {
+ return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode();
+ }
+
+
+ public boolean equals(FieldTypeInfo ti) {
+ // first check if fieldID matches
+ if (!this.fieldID.equals(ti.fieldID)) {
+ return false;
+ }
+ // now see if typeID matches
+ return (this.typeID.equals(ti.typeID));
+ }
+
+}
+
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java
new file mode 100644
index 00000000000..f9c5320cfbc
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Represents typeID for a Map
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class MapTypeID extends TypeID {
+
+ private TypeID typeIDKey;
+ private TypeID typeIDValue;
+
+ public MapTypeID(TypeID typeIDKey, TypeID typeIDValue) {
+ super(RIOType.MAP);
+ this.typeIDKey = typeIDKey;
+ this.typeIDValue = typeIDValue;
+ }
+
+ /**
+ * get the TypeID of the map's key element
+ */
+ public TypeID getKeyTypeID() {
+ return this.typeIDKey;
+ }
+
+ /**
+ * get the TypeID of the map's value element
+ */
+ public TypeID getValueTypeID() {
+ return this.typeIDValue;
+ }
+
+ @Override
+ void write(RecordOutput rout, String tag) throws IOException {
+ rout.writeByte(typeVal, tag);
+ typeIDKey.write(rout, tag);
+ typeIDValue.write(rout, tag);
+ }
+
+ /**
+ * Two map typeIDs are equal if their constituent elements have the
+ * same type
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (!super.equals(o))
+ return false;
+
+ MapTypeID mti = (MapTypeID) o;
+
+ return this.typeIDKey.equals(mti.typeIDKey) &&
+ this.typeIDValue.equals(mti.typeIDValue);
+ }
+
+ /**
+ * We use a basic hashcode implementation, since this class will likely not
+ * be used as a hashmap key
+ */
+ @Override
+ public int hashCode() {
+ return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode();
+ }
+
+}
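A MapTypeID simply composes two other TypeIDs. A one-line sketch, assuming the static leaf TypeIDs (TypeID.StringTypeID and friends) defined elsewhere in this package:

    // Key and value types for a map<ustring, buffer> field.
    TypeID metaType = new MapTypeID(TypeID.StringTypeID, TypeID.BufferTypeID);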
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java
new file mode 100644
index 00000000000..8a9d0b5fbbe
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordInput;
+import org.apache.hadoop.record.RecordOutput;
+
+
+/**
+ * A record's Type Information object which can read/write itself.
+ *
+ * Type information for a record comprises metadata about the record,
+ * as well as a collection of type information for each field in the record.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class RecordTypeInfo extends org.apache.hadoop.record.Record
+{
+
+ private String name;
+ // A RecordTypeInfo is really just a wrapper around StructTypeID
+ StructTypeID sTid;
+ // A RecordTypeInfo object is just a collection of TypeInfo objects for each of its fields.
+ //private ArrayList<FieldTypeInfo> typeInfos = new ArrayList<FieldTypeInfo>();
+ // we keep a hashmap of struct/record names and their type information, as we need it to
+ // set filters when reading nested structs. This map is used during deserialization.
+ //private Map<String, RecordTypeInfo> structRTIs = new HashMap<String, RecordTypeInfo>();
+
+ /**
+ * Create an empty RecordTypeInfo object.
+ */
+ public RecordTypeInfo() {
+ sTid = new StructTypeID();
+ }
+
+ /**
+ * Create a RecordTypeInfo object representing a record with the given name
+ * @param name Name of the record
+ */
+ public RecordTypeInfo(String name) {
+ this.name = name;
+ sTid = new StructTypeID();
+ }
+
+ /*
+ * private constructor
+ */
+ private RecordTypeInfo(String name, StructTypeID stid) {
+ this.sTid = stid;
+ this.name = name;
+ }
+
+ /**
+ * return the name of the record
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * set the name of the record
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * Add a field.
+ * @param fieldName Name of the field
+ * @param tid Type ID of the field
+ */
+ public void addField(String fieldName, TypeID tid) {
+ sTid.getFieldTypeInfos().add(new FieldTypeInfo(fieldName, tid));
+ }
+
+ private void addAll(Collection<FieldTypeInfo> tis) {
+ sTid.getFieldTypeInfos().addAll(tis);
+ }
+
+ /**
+ * Return a collection of field type infos
+ */
+ public Collection<FieldTypeInfo> getFieldTypeInfos() {
+ return sTid.getFieldTypeInfos();
+ }
+
+ /**
+ * Return the type info of a nested record. We only consider nesting
+ * to one level.
+ * @param name Name of the nested record
+ */
+ public RecordTypeInfo getNestedStructTypeInfo(String name) {
+ StructTypeID stid = sTid.findStruct(name);
+ if (null == stid) return null;
+ return new RecordTypeInfo(name, stid);
+ }
+
+ /**
+ * Serialize the type information for a record
+ */
+ @Override
+ public void serialize(RecordOutput rout, String tag) throws IOException {
+ // write out any header, version info, here
+ rout.startRecord(this, tag);
+ rout.writeString(name, tag);
+ sTid.writeRest(rout, tag);
+ rout.endRecord(this, tag);
+ }
+
+ /**
+ * Deserialize the type information for a record
+ */
+ @Override
+ public void deserialize(RecordInput rin, String tag) throws IOException {
+ // read in any header, version info
+ rin.startRecord(tag);
+ // name
+ this.name = rin.readString(tag);
+ sTid.read(rin, tag);
+ rin.endRecord(tag);
+ }
+
+ /**
+ * This class doesn't implement Comparable as it's not meant to be used
+ * for anything besides de/serializing, so comparison is not supported:
+ * compareTo() throws a ClassCastException when the peer is not a
+ * RecordTypeInfo, and an UnsupportedOperationException otherwise.
+ */
+ @Override
+ public int compareTo(final Object peer_) throws ClassCastException {
+ if (!(peer_ instanceof RecordTypeInfo)) {
+ throw new ClassCastException("Comparing different types of records.");
+ }
+ throw new UnsupportedOperationException("compareTo() is not supported");
+ }
+}
+
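To make the RecordTypeInfo API above concrete, here is a minimal sketch (not part of the patch; RtiDemo and the field names are invented) of building type information with a nested struct and looking it back up via getNestedStructTypeInfo():

import org.apache.hadoop.record.meta.RecordTypeInfo;
import org.apache.hadoop.record.meta.StructTypeID;
import org.apache.hadoop.record.meta.TypeID;

public class RtiDemo {
  public static void main(String[] args) {
    RecordTypeInfo address = new RecordTypeInfo("Address");
    address.addField("zip", TypeID.StringTypeID);

    RecordTypeInfo employee = new RecordTypeInfo("Employee");
    employee.addField("name", TypeID.StringTypeID);
    // a nested record is represented by a StructTypeID built from its RTI
    employee.addField("home", new StructTypeID(address));

    // only one level of nesting is considered, per the javadoc above
    RecordTypeInfo nested = employee.getNestedStructTypeInfo("home");
    System.out.println(nested != null);     // true
    System.out.println(employee.getName()); // Employee
  }
}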
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java
new file mode 100644
index 00000000000..d2c9ccdc753
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordInput;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Represents typeID for a struct
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class StructTypeID extends TypeID {
+ private ArrayList<FieldTypeInfo> typeInfos = new ArrayList<FieldTypeInfo>();
+
+ StructTypeID() {
+ super(RIOType.STRUCT);
+ }
+
+ /**
+ * Create a StructTypeID based on the RecordTypeInfo of some record
+ */
+ public StructTypeID(RecordTypeInfo rti) {
+ super(RIOType.STRUCT);
+ typeInfos.addAll(rti.getFieldTypeInfos());
+ }
+
+ void add(FieldTypeInfo ti) {
+ typeInfos.add(ti);
+ }
+
+ public Collection<FieldTypeInfo> getFieldTypeInfos() {
+ return typeInfos;
+ }
+
+ /*
+ * return the StructTypeID, if any, of the given field
+ */
+ StructTypeID findStruct(String name) {
+ // walk through the list, searching. Not the most efficient way, but this
+ // is intended to be used rarely, so we keep it simple.
+ // As an optimization, we could keep a hashmap of record name to its RTI for later.
+ for (FieldTypeInfo ti : typeInfos) {
+ if ((0 == ti.getFieldID().compareTo(name)) && (ti.getTypeID().getTypeVal() == RIOType.STRUCT)) {
+ return (StructTypeID) ti.getTypeID();
+ }
+ }
+ return null;
+ }
+
+ @Override
+ void write(RecordOutput rout, String tag) throws IOException {
+ rout.writeByte(typeVal, tag);
+ writeRest(rout, tag);
+ }
+
+ /*
+ * Writes rest of the struct (excluding type value).
+ * As an optimization, this method is directly called by RTI
+ * for the top level record so that we don't write out the byte
+ * indicating that this is a struct (since top level records are
+ * always structs).
+ */
+ void writeRest(RecordOutput rout, String tag) throws IOException {
+ rout.writeInt(typeInfos.size(), tag);
+ for (FieldTypeInfo ti : typeInfos) {
+ ti.write(rout, tag);
+ }
+ }
+
+ /*
+ * deserialize ourselves. Called by RTI.
+ */
+ void read(RecordInput rin, String tag) throws IOException {
+ // number of elements
+ int numElems = rin.readInt(tag);
+ for (int i=0; i<numElems; i++) {
+ typeInfos.add(genericReadTypeInfo(rin, tag));
+ }
+ }
+
+ // generic reader: reads a field name and its TypeID from the stream
+ private FieldTypeInfo genericReadTypeInfo(RecordInput rin, String tag) throws IOException {
+ String fieldName = rin.readString(tag);
+ TypeID id = genericReadTypeID(rin, tag);
+ return new FieldTypeInfo(fieldName, id);
+ }
+
+ // generic reader: reads the next TypeID object from the stream and returns it
+ private TypeID genericReadTypeID(RecordInput rin, String tag) throws IOException {
+ byte typeVal = rin.readByte(tag);
+ switch (typeVal) {
+ case TypeID.RIOType.BOOL:
+ return TypeID.BoolTypeID;
+ case TypeID.RIOType.BUFFER:
+ return TypeID.BufferTypeID;
+ case TypeID.RIOType.BYTE:
+ return TypeID.ByteTypeID;
+ case TypeID.RIOType.DOUBLE:
+ return TypeID.DoubleTypeID;
+ case TypeID.RIOType.FLOAT:
+ return TypeID.FloatTypeID;
+ case TypeID.RIOType.INT:
+ return TypeID.IntTypeID;
+ case TypeID.RIOType.LONG:
+ return TypeID.LongTypeID;
+ case TypeID.RIOType.MAP:
+ {
+ TypeID tIDKey = genericReadTypeID(rin, tag);
+ TypeID tIDValue = genericReadTypeID(rin, tag);
+ return new MapTypeID(tIDKey, tIDValue);
+ }
+ case TypeID.RIOType.STRING:
+ return TypeID.StringTypeID;
+ case TypeID.RIOType.STRUCT:
+ {
+ StructTypeID stID = new StructTypeID();
+ int numElems = rin.readInt(tag);
+ for (int i=0; i<numElems; i++) {
+ stID.add(genericReadTypeInfo(rin, tag));
+ }
+ return stID;
+ }
+ case TypeID.RIOType.VECTOR:
+ {
+ TypeID tID = genericReadTypeID(rin, tag);
+ return new VectorTypeID(tID);
+ }
+ default:
+ // shouldn't be here
+ throw new IOException("Unknown type read");
+ }
+ }
+}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/TypeID.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/TypeID.java
new file mode 100644
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/TypeID.java
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Represents typeID for basic types.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class TypeID {
+
+ /**
+ * constants representing the IDL types we support
+ */
+ public static final class RIOType {
+ public static final byte BOOL = 1;
+ public static final byte BUFFER = 2;
+ public static final byte BYTE = 3;
+ public static final byte DOUBLE = 4;
+ public static final byte FLOAT = 5;
+ public static final byte INT = 6;
+ public static final byte LONG = 7;
+ public static final byte MAP = 8;
+ public static final byte STRING = 9;
+ public static final byte STRUCT = 10;
+ public static final byte VECTOR = 11;
+ }
+
+ /**
+ * Constant classes for the basic types, so we can share them.
+ */
+ public static final TypeID BoolTypeID = new TypeID(RIOType.BOOL);
+ public static final TypeID BufferTypeID = new TypeID(RIOType.BUFFER);
+ public static final TypeID ByteTypeID = new TypeID(RIOType.BYTE);
+ public static final TypeID DoubleTypeID = new TypeID(RIOType.DOUBLE);
+ public static final TypeID FloatTypeID = new TypeID(RIOType.FLOAT);
+ public static final TypeID IntTypeID = new TypeID(RIOType.INT);
+ public static final TypeID LongTypeID = new TypeID(RIOType.LONG);
+ public static final TypeID StringTypeID = new TypeID(RIOType.STRING);
+
+ protected byte typeVal;
+
+ /**
+ * Create a TypeID object
+ */
+ TypeID(byte typeVal) {
+ this.typeVal = typeVal;
+ }
+
+ /**
+ * Get the type value. One of the constants in RIOType.
+ */
+ public byte getTypeVal() {
+ return typeVal;
+ }
+
+ /**
+ * Serialize the TypeID object
+ */
+ void write(RecordOutput rout, String tag) throws IOException {
+ rout.writeByte(typeVal, tag);
+ }
+
+ /**
+ * Two base typeIDs are equal if they refer to the same type
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+
+ if (o == null)
+ return false;
+
+ if (this.getClass() != o.getClass())
+ return false;
+
+ TypeID oTypeID = (TypeID) o;
+ return (this.typeVal == oTypeID.typeVal);
+ }
+
+ /**
+ * We use a basic hashcode implementation, since this class will likely not
+ * be used as a hashmap key
+ */
+ @Override
+ public int hashCode() {
+ // See 'Effective Java' by Joshua Bloch
+ return 37*17+(int)typeVal;
+ }
+}
+
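The shared constants and equals() semantics can be exercised directly. A small sketch, again not part of the patch (TypeIdEqualityDemo is invented); it also uses VectorTypeID, which is added later in this patch:

import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.VectorTypeID;

public class TypeIdEqualityDemo {
  public static void main(String[] args) {
    TypeID a = TypeID.IntTypeID;
    TypeID b = TypeID.IntTypeID;
    System.out.println(a == b);        // true: basic typeIDs are shared constants
    System.out.println(a.equals(b));   // true

    VectorTypeID v1 = new VectorTypeID(TypeID.StringTypeID);
    VectorTypeID v2 = new VectorTypeID(TypeID.StringTypeID);
    System.out.println(v1.equals(v2)); // true: same element type
    System.out.println(v1.equals(a));  // false: getClass() differs, so super.equals fails
  }
}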
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/Utils.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/Utils.java
new file mode 100644
index 00000000000..f7a22cb3445
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/Utils.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordInput;
+
+/**
+ * Various utility functions for the Hadoop record I/O platform.
+ *
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Utils {
+
+ /** Cannot create a new instance of Utils */
+ private Utils() {
+ }
+
+ /**
+ * read/skip bytes from stream based on a type
+ */
+ public static void skip(RecordInput rin, String tag, TypeID typeID) throws IOException {
+ switch (typeID.typeVal) {
+ case TypeID.RIOType.BOOL:
+ rin.readBool(tag);
+ break;
+ case TypeID.RIOType.BUFFER:
+ rin.readBuffer(tag);
+ break;
+ case TypeID.RIOType.BYTE:
+ rin.readByte(tag);
+ break;
+ case TypeID.RIOType.DOUBLE:
+ rin.readDouble(tag);
+ break;
+ case TypeID.RIOType.FLOAT:
+ rin.readFloat(tag);
+ break;
+ case TypeID.RIOType.INT:
+ rin.readInt(tag);
+ break;
+ case TypeID.RIOType.LONG:
+ rin.readLong(tag);
+ break;
+ case TypeID.RIOType.MAP:
+ org.apache.hadoop.record.Index midx1 = rin.startMap(tag);
+ MapTypeID mtID = (MapTypeID) typeID;
+ for (; !midx1.done(); midx1.incr()) {
+ skip(rin, tag, mtID.getKeyTypeID());
+ skip(rin, tag, mtID.getValueTypeID());
+ }
+ rin.endMap(tag);
+ break;
+ case TypeID.RIOType.STRING:
+ rin.readString(tag);
+ break;
+ case TypeID.RIOType.STRUCT:
+ rin.startRecord(tag);
+ // read past each field in the struct
+ StructTypeID stID = (StructTypeID) typeID;
+ Iterator<FieldTypeInfo> it = stID.getFieldTypeInfos().iterator();
+ while (it.hasNext()) {
+ FieldTypeInfo tInfo = it.next();
+ skip(rin, tag, tInfo.getTypeID());
+ }
+ rin.endRecord(tag);
+ break;
+ case TypeID.RIOType.VECTOR:
+ org.apache.hadoop.record.Index vidx1 = rin.startVector(tag);
+ VectorTypeID vtID = (VectorTypeID) typeID;
+ for (; !vidx1.done(); vidx1.incr()) {
+ skip(rin, tag, vtID.getElementTypeID());
+ }
+ rin.endVector(tag);
+ break;
+ default:
+ // shouldn't be here
+ throw new IOException("Unknown typeID when skipping bytes");
+ }
+ }
+}
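The skip() utility pairs with the binary archives from org.apache.hadoop.record. Below is a hedged sketch (SkipDemo is invented; it assumes BinaryRecordOutput/BinaryRecordInput accept raw streams, as the deprecated record API does) of skipping a field that the reader's version of a record doesn't know about:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.record.BinaryRecordInput;
import org.apache.hadoop.record.BinaryRecordOutput;
import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.Utils;

public class SkipDemo {
  public static void main(String[] args) throws Exception {
    // write an int (a field our reader doesn't know) followed by a string
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryRecordOutput out = new BinaryRecordOutput(bos);
    out.writeInt(42, "unknownField");
    out.writeString("kept", "name");

    // skip past the unknown int, then read the string we care about
    BinaryRecordInput in =
        new BinaryRecordInput(new ByteArrayInputStream(bos.toByteArray()));
    Utils.skip(in, "unknownField", TypeID.IntTypeID);
    System.out.println(in.readString("name")); // kept
  }
}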
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java
new file mode 100644
index 00000000000..22ab07efdc2
--- /dev/null
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Represents typeID for vector.
+ * @deprecated Replaced by Avro.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class VectorTypeID extends TypeID {
+ private TypeID typeIDElement;
+
+ public VectorTypeID(TypeID typeIDElement) {
+ super(RIOType.VECTOR);
+ this.typeIDElement = typeIDElement;
+ }
+
+ public TypeID getElementTypeID() {
+ return this.typeIDElement;
+ }
+
+ @Override
+ void write(RecordOutput rout, String tag) throws IOException {
+ rout.writeByte(typeVal, tag);
+ typeIDElement.write(rout, tag);
+ }
+
+ /**
+ * Two vector typeIDs are equal if their constituent elements have the
+ * same type
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (!super.equals(o))
+ return false;
+
+ VectorTypeID vti = (VectorTypeID) o;
+ return this.typeIDElement.equals(vti.typeIDElement);
+ }
+
+ /**
+ * We use a basic hashcode implementation, since this class will likely not
+ * be used as a hashmap key
+ */
+ @Override
+ public int hashCode() {
+ return 37*17+typeIDElement.hashCode();
+ }
+
+}
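Finally, composite typeIDs nest arbitrarily; a brief sketch (NestedTypeIdDemo is invented) describing a vector<map<string, long>>:

import org.apache.hadoop.record.meta.MapTypeID;
import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.VectorTypeID;

public class NestedTypeIdDemo {
  public static void main(String[] args) {
    // typeID for vector<map<string, long>>, built by composing constructors
    MapTypeID mapId = new MapTypeID(TypeID.StringTypeID, TypeID.LongTypeID);
    VectorTypeID vecId = new VectorTypeID(mapId);
    System.out.println(vecId.getTypeVal() == TypeID.RIOType.VECTOR); // true
    System.out.println(vecId.getElementTypeID().equals(mapId));      // true
  }
}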