/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * A report on the state of a task.
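 *
 * <p>A minimal usage sketch, assuming an existing job whose id is held in a
 * {@code JobID jobId} variable and a reachable cluster; reports are normally
 * obtained from a {@link JobClient} rather than constructed directly:
 * <pre>{@code
 * JobClient client = new JobClient(new JobConf());
 * for (TaskReport report : client.getMapTaskReports(jobId)) {
 *   System.out.println(report.getTaskID() + ": " + report.getState()
 *       + " (" + report.getProgress() + ")");
 * }
 * }</pre>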
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TaskReport extends org.apache.hadoop.mapreduce.TaskReport {

  public TaskReport() {
    super();
  }

  /**
   * Creates a new TaskReport object.
   * @param taskid id of the task
   * @param progress progress of the task
   * @param state state of the task
   * @param diagnostics diagnostic messages for the task
   * @param startTime start time of the task
   * @param finishTime finish time of the task
   * @param counters counters of the task
   * @deprecated Use the constructor that also takes a {@link TIPStatus}.
   */
  @Deprecated
  TaskReport(TaskID taskid, float progress, String state,
      String[] diagnostics, long startTime, long finishTime,
      Counters counters) {
    this(taskid, progress, state, diagnostics, null, startTime, finishTime,
        counters);
  }

  /**
   * Creates a new TaskReport object.
   * @param taskid id of the task
   * @param progress progress of the task
   * @param state state of the task
   * @param diagnostics diagnostic messages for the task
   * @param currentStatus current status of the task
   * @param startTime start time of the task
   * @param finishTime finish time of the task
   * @param counters counters of the task
   */
  TaskReport(TaskID taskid, float progress, String state,
             String[] diagnostics, TIPStatus currentStatus,
             long startTime, long finishTime,
             Counters counters) {
    super(taskid, progress, state, diagnostics, currentStatus, startTime,
      finishTime, new org.apache.hadoop.mapreduce.Counters(counters));
  }

  /** Converts a new-API {@link org.apache.hadoop.mapreduce.TaskReport} into
   * an old-API {@link TaskReport}. */
  static TaskReport downgrade(
      org.apache.hadoop.mapreduce.TaskReport report) {
    return new TaskReport(TaskID.downgrade(report.getTaskId()),
      report.getProgress(), report.getState(), report.getDiagnostics(),
      report.getCurrentStatus(), report.getStartTime(), report.getFinishTime(),
      Counters.downgrade(report.getTaskCounters()));
  }

  /** Converts an array of new-API reports into old-API {@link TaskReport}s. */
  static TaskReport[] downgradeArray(
      org.apache.hadoop.mapreduce.TaskReport[] reports) {
    List<TaskReport> ret = new ArrayList<TaskReport>();
    for (org.apache.hadoop.mapreduce.TaskReport report : reports) {
      ret.add(downgrade(report));
    }
    return ret.toArray(new TaskReport[0]);
  }

  /** The id of the task. */
  public TaskID getTaskID() { return TaskID.downgrade(super.getTaskId()); }

  /** The counters of the task. */
  public Counters getCounters() {
    return Counters.downgrade(super.getTaskCounters());
  }

  /**
   * Set the successful attempt id of the task.
   * @param t id of the successful task attempt
   */
  public void setSuccessfulAttempt(TaskAttemptID t) {
    super.setSuccessfulAttemptId(t);
  }
  /**
   * Get the attempt id that took this task to completion.
   * @return id of the successful task attempt
   */
  public TaskAttemptID getSuccessfulTaskAttempt() {
    return TaskAttemptID.downgrade(super.getSuccessfulTaskAttemptId());
  }
  /**
   * Set the running attempt(s) of the task.
   * @param runningAttempts ids of the currently running task attempts
   */
  public void setRunningTaskAttempts(
      Collection<TaskAttemptID> runningAttempts) {
    Collection<org.apache.hadoop.mapreduce.TaskAttemptID> attempts =
      new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
    for (TaskAttemptID id : runningAttempts) {
      attempts.add(id);
    }
    super.setRunningTaskAttemptIds(attempts);
  }
  /**
   * Get the running task attempt ids for this task.
   * @return ids of the currently running task attempts
   */
  public Collection<TaskAttemptID> getRunningTaskAttempts() {
    Collection<TaskAttemptID> attempts = new ArrayList<TaskAttemptID>();
    for (org.apache.hadoop.mapreduce.TaskAttemptID id :
         super.getRunningTaskAttemptIds()) {
      attempts.add(TaskAttemptID.downgrade(id));
    }
    return attempts;
  }

  /**
   * Set the finish time of the task.
   * @param finishTime finish time of the task
   */
  protected void setFinishTime(long finishTime) {
    super.setFinishTime(finishTime);
  }

  /**
   * Set the start time of the task.
   * @param startTime start time of the task
   */
  protected void setStartTime(long startTime) {
    super.setStartTime(startTime);
  }

}