001    /**
002     * Licensed to the Apache Software Foundation (ASF) under one
003     * or more contributor license agreements.  See the NOTICE file
004     * distributed with this work for additional information
005     * regarding copyright ownership.  The ASF licenses this file
006     * to you under the Apache License, Version 2.0 (the
007     * "License"); you may not use this file except in compliance
008     * with the License.  You may obtain a copy of the License at
009     *
010     *     http://www.apache.org/licenses/LICENSE-2.0
011     *
012     * Unless required by applicable law or agreed to in writing, software
013     * distributed under the License is distributed on an "AS IS" BASIS,
014     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015     * See the License for the specific language governing permissions and
016     * limitations under the License.
017     */
018    
019    package org.apache.hadoop.record.meta;
020    
021    import java.io.IOException;
022    import java.util.Iterator;
023    
024    import org.apache.hadoop.classification.InterfaceAudience;
025    import org.apache.hadoop.classification.InterfaceStability;
026    import org.apache.hadoop.record.RecordInput;
027    
028    /**
 * Various utility functions for the Hadoop record I/O platform.
030     * 
031     * @deprecated Replaced by <a href="https://hadoop.apache.org/avro/">Avro</a>.
032     */
033    @Deprecated
034    @InterfaceAudience.Public
035    @InterfaceStability.Stable
036    public class Utils {
037      
038      /** Cannot create a new instance of Utils */
039      private Utils() {
040      }
041      
042      /**
043       * read/skip bytes from stream based on a type
044       */
045      public static void skip(RecordInput rin, String tag, TypeID typeID) throws IOException {
046        switch (typeID.typeVal) {
047        case TypeID.RIOType.BOOL: 
048          rin.readBool(tag);
049          break;
050        case TypeID.RIOType.BUFFER: 
051          rin.readBuffer(tag);
052          break;
053        case TypeID.RIOType.BYTE: 
054          rin.readByte(tag);
055          break;
056        case TypeID.RIOType.DOUBLE: 
057          rin.readDouble(tag);
058          break;
059        case TypeID.RIOType.FLOAT: 
060          rin.readFloat(tag);
061          break;
062        case TypeID.RIOType.INT: 
063          rin.readInt(tag);
064          break;
065        case TypeID.RIOType.LONG: 
066          rin.readLong(tag);
067          break;
068        case TypeID.RIOType.MAP: 
069          org.apache.hadoop.record.Index midx1 = rin.startMap(tag);
070          MapTypeID mtID = (MapTypeID) typeID;
071          for (; !midx1.done(); midx1.incr()) {
072            skip(rin, tag, mtID.getKeyTypeID());
073            skip(rin, tag, mtID.getValueTypeID());
074          }
075          rin.endMap(tag);
076          break;
077        case TypeID.RIOType.STRING: 
078          rin.readString(tag);
079          break;
080        case TypeID.RIOType.STRUCT:
081          rin.startRecord(tag);
082          // read past each field in the struct
083          StructTypeID stID = (StructTypeID) typeID;
084          Iterator<FieldTypeInfo> it = stID.getFieldTypeInfos().iterator();
085          while (it.hasNext()) {
086            FieldTypeInfo tInfo = it.next();
087            skip(rin, tag, tInfo.getTypeID());
088          }
089          rin.endRecord(tag);
090          break;
091        case TypeID.RIOType.VECTOR: 
092          org.apache.hadoop.record.Index vidx1 = rin.startVector(tag);
093          VectorTypeID vtID = (VectorTypeID) typeID;
094          for (; !vidx1.done(); vidx1.incr()) {
095            skip(rin, tag, vtID.getElementTypeID());
096          }
097          rin.endVector(tag);
098          break;
099        default: 
100          // shouldn't be here
101          throw new IOException("Unknown typeID when skipping bytes");
102        }
103      }
104    }