001/**
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.record.compiler.ant;
019
import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.compiler.generated.Rcc;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;
031
032/**
033 * Hadoop record compiler ant Task
034 *<p> This task takes the given record definition files and compiles them into
035 * java or c++
036 * files. It is then up to the user to compile the generated files.
037 *
038 * <p> The task requires the <code>file</code> or the nested fileset element to be
039 * specified. Optional attributes are <code>language</code> (set the output
040 * language, default is "java"),
041 * <code>destdir</code> (name of the destination directory for generated java/c++
042 * code, default is ".") and <code>failonerror</code> (specifies error handling
043 * behavior. default is true).
 * <h4>Usage</h4>
045 * <pre>
046 * &lt;recordcc
047 *       destdir="${basedir}/gensrc"
048 *       language="java"&gt;
049 *   &lt;fileset include="**\/*.jr" /&gt;
050 * &lt;/recordcc&gt;
051 * </pre>
052 * 
053 * @deprecated Replaced by <a href="https://hadoop.apache.org/avro/">Avro</a>.
054 */
055@Deprecated
056@InterfaceAudience.Public
057@InterfaceStability.Stable
058public class RccTask extends Task {
059  
060  private String language = "java";
061  private File src;
062  private File dest = new File(".");
063  private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
064  private boolean failOnError = true;
065  
066  /** Creates a new instance of RccTask */
067  public RccTask() {
068  }
069  
070  /**
071   * Sets the output language option
072   * @param language "java"/"c++"
073   */
074  public void setLanguage(String language) {
075    this.language = language;
076  }
077  
078  /**
079   * Sets the record definition file attribute
080   * @param file record definition file
081   */
082  public void setFile(File file) {
083    this.src = file;
084  }
085  
086  /**
087   * Given multiple files (via fileset), set the error handling behavior
088   * @param flag true will throw build exception in case of failure (default)
089   */
090  public void setFailonerror(boolean flag) {
091    this.failOnError = flag;
092  }
093  
094  /**
095   * Sets directory where output files will be generated
096   * @param dir output directory
097   */
098  public void setDestdir(File dir) {
099    this.dest = dir;
100  }
101  
102  /**
103   * Adds a fileset that can consist of one or more files
104   * @param set Set of record definition files
105   */
106  public void addFileset(FileSet set) {
107    filesets.add(set);
108  }
109  
110  /**
111   * Invoke the Hadoop record compiler on each record definition file
112   */
113  @Override
114  public void execute() throws BuildException {
115    if (src == null && filesets.size()==0) {
116      throw new BuildException("There must be a file attribute or a fileset child element");
117    }
118    if (src != null) {
119      doCompile(src);
120    }
121    Project myProject = getProject();
122    for (int i = 0; i < filesets.size(); i++) {
123      FileSet fs = filesets.get(i);
124      DirectoryScanner ds = fs.getDirectoryScanner(myProject);
125      File dir = fs.getDir(myProject);
126      String[] srcs = ds.getIncludedFiles();
127      for (int j = 0; j < srcs.length; j++) {
128        doCompile(new File(dir, srcs[j]));
129      }
130    }
131  }
132  
133  private void doCompile(File file) throws BuildException {
134    String[] args = new String[5];
135    args[0] = "--language";
136    args[1] = this.language;
137    args[2] = "--destdir";
138    args[3] = this.dest.getPath();
139    args[4] = file.getPath();
140    int retVal = Rcc.driver(args);
141    if (retVal != 0 && failOnError) {
142      throw new BuildException("Hadoop record compiler returned error code "+retVal);
143    }
144  }
145}