CPD Results

The following document contains the results of PMD's Copy/Paste Detector (CPD) 5.2.3.

Duplications

File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 104
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 103
        keys.put("JOBID", jobId);
      }
      // if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("SUBMIT_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" JOBNAME="MY_JOB"
      // USER="userxxx"
      // // SUBMIT_TIME="1208760436751"
      // JOBCONF="/mapredsystem/xxx.yyy.com/job_200804210403_0005/job.xml"
      //					
      //					
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("LAUNCH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" LAUNCH_TIME="1208760437110"
      // TOTAL_MAPS="5912" TOTAL_REDUCES="739"
      //					
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("FINISH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816"
      // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912" FINISHED_REDUCES="739"
      // FAILED_MAPS="0" FAILED_REDUCES="0"
      // // COUNTERS="File Systems.Local bytes read:1735053407244,File
      // Systems.Local bytes written:2610106384012,File Systems.HDFS bytes
      // read:801605644910,File Systems.HDFS bytes written:44135800,
      // // Job Counters .Launched map tasks:5912,Job Counters .Launched
      // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
      // Counters .Rack-local map tasks:316,Map-Reduce Framework.
      // // Map input records:9410696067,Map-Reduce Framework.Map output
      // records:9410696067,Map-Reduce Framework.Map input
      // bytes:801599188816,Map-Reduce Framework.Map output
      // bytes:784427968116,
      // // Map-Reduce Framework.Combine input records:0,Map-Reduce
      // Framework.Combine output records:0,Map-Reduce Framework.Reduce
      // input groups:477265,Map-Reduce Framework.Reduce input
      // records:739000,
      // // Map-Reduce Framework.Reduce output records:739000"
      //					
      // }
      // else
      if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("START_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")) {
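
A minimal sketch of one way to collapse the repeated key/record construction shown above into a single helper that either processor could call from each branch. The method name emitPhaseRecord is hypothetical; the Chukwa types and calls (ChukwaRecordKey, ChukwaRecord, Record.tagsField, output.collect) are the ones already used in the duplicated block, so no new imports would be needed in these files.

      private void emitPhaseRecord(OutputCollector<ChukwaRecordKey, ChukwaRecord> output,
          Chunk chunk, String phase, String jobId, String timeField, String time)
          throws IOException {
        ChukwaRecordKey key = new ChukwaRecordKey();
        key.setKey("JobLogHist/" + phase + "/" + jobId + "/" + time);
        key.setReduceType("JobLogHistoryReduceProcessor");
        ChukwaRecord record = new ChukwaRecord();
        record.setTime(Long.parseLong(time));
        record.add("JOBID", jobId);
        record.add(timeField, time);
        record.add(Record.tagsField, chunk.getTags());
        output.collect(key, record);
      }

      // Example call for the MapAttempt/START_TIME branch above:
      //   emitPhaseRecord(output, chunk, "Map", keys.get("JOBID"),
      //       "START_TIME", keys.get("START_TIME"));
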
File Line
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java 110
org/apache/hadoop/chukwa/hicc/TimeHandler.java 176
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat formatHour = new SimpleDateFormat("HH");
    SimpleDateFormat formatMin = new SimpleDateFormat("mm");

    formatter.setTimeZone(this.tz);
    formatDate.setTimeZone(this.tz);
    formatHour.setTimeZone(this.tz);
    formatMin.setTimeZone(this.tz);

    startS = formatter.format(start);
    this.startDate = formatDate.format(start);
    this.startHour = formatHour.format(start);
    this.startMin = formatMin.format(start);
    endS = formatter.format(end);
    this.endDate = formatDate.format(end);
    this.endHour = formatHour.format(end);
    this.endMin = formatMin.format(end);
  }

  public String getStartDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.start);
  }

  public String getStartDate() {
    return this.startDate;
  }

  public String getStartHour() {
    return this.startHour;
  }

  public String getStartMinute() {
    return this.startMin;
  }

  public String getStartTimeText() {
    return this.startS;
  }

  public long getStartTime() {
    return start;
  }

  public String getEndDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.end);
  }

  public String getEndDate() {
    return this.endDate;
  }

  public String getEndHour() {
    return this.endHour;
  }

  public String getEndMinute() {
    return this.endMin;
  }

  public String getEndTimeText() {
    return this.endS;
  }

  public long getEndTime() {
    return end;
  }

}
File Line
org/apache/hadoop/chukwa/datacollection/agent/MemLimitQueue.java 69
org/apache/hadoop/chukwa/datacollection/agent/NonBlockingMemLimitQueue.java 72
        }
      }
      metrics.fullQueue.set(0);
      dataSize += chunk.getData().length;
      queue.add(chunk);
      metrics.addedChunk.inc();
      metrics.queueSize.set(queue.size());
      metrics.dataSize.set(dataSize);
      this.notifyAll();
    }

  }

  /**
   * @see org.apache.hadoop.chukwa.datacollection.ChunkQueue#collect(java.util.List,
   *      int)
   */
  public void collect(List<Chunk> events, int maxSize)
      throws InterruptedException {
    synchronized (this) {
      // we can't just say queue.take() here, since we're holding a lock.
      while (queue.isEmpty()) {
        this.wait();
      }

      int size = 0;
      while (!queue.isEmpty() && (size < maxSize)) {
        Chunk e = this.queue.remove();
        metrics.removedChunk.inc();
        int chunkSize = e.getData().length;
        size += chunkSize;
        dataSize -= chunkSize;
        metrics.dataSize.set(dataSize);
        events.add(e);
      }
      metrics.queueSize.set(queue.size());
      this.notifyAll();
    }

    if (log.isDebugEnabled()) {
      log.debug("WaitingQueue.inQueueCount:" + queue.size()
          + "\tWaitingQueue.collectCount:" + events.size());
    }
  }

  public int size() {
    return queue.size();
  }
  
  private void configure(Configuration conf) {
    MAX_MEM_USAGE = QUEUE_SIZE;
    if(conf == null){
      return;
    }
    String limit = conf.get(CHUNK_QUEUE_LIMIT);
    if(limit != null){
      try{
        MAX_MEM_USAGE = Integer.parseInt(limit);
      } catch(NumberFormatException nfe) {
        log.error("Exception reading property " + CHUNK_QUEUE_LIMIT
            + ". Defaulting internal queue size to " + QUEUE_SIZE);
      }
    }
    log.info("Using MemLimitQueue limit of " + MAX_MEM_USAGE);
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 311
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 233
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
              s2, i2);
          if (r1 != 0) {
            return (r1 < 0) ? -1 : 0;
          }
          s1 += i1;
          s2 += i2;
          l1 -= i1;
          l1 -= i2;
        }
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
              s2, i2);
          if (r1 != 0) {
            return (r1 < 0) ? -1 : 0;
          }
          s1 += i1;
          s2 += i2;
          l1 -= i1;
          l1 -= i2;
        }
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 53
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 52
  }

  public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
    return _rio_recTypeInfo;
  }

  public static void setTypeFilter(
      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
    if (null == rti)
      return;
    _rio_rtiFilter = rti;
    _rio_rtiFilterFields = null;
  }

  private static void setupRtiFields() {
    if (null == _rio_rtiFilter)
      return;
    // we may already have done this
    if (null != _rio_rtiFilterFields)
      return;
    int _rio_i, _rio_j;
    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
      _rio_rtiFilterFields[_rio_i] = 0;
    }
    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
        .getFieldTypeInfos().iterator();
    _rio_i = 0;
    while (_rio_itFilter.hasNext()) {
      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
          .next();
      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
          .getFieldTypeInfos().iterator();
      _rio_j = 1;
      while (_rio_it.hasNext()) {
        org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
        if (_rio_tInfo.equals(_rio_tInfoFilter)) {
          _rio_rtiFilterFields[_rio_i] = _rio_j;
          break;
        }
        _rio_j++;
      }
      _rio_i++;
    }
  }

  public long getTimePartition() {
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 53
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 52
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 44
  }

  public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
    return _rio_recTypeInfo;
  }

  public static void setTypeFilter(
      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
    if (null == rti)
      return;
    _rio_rtiFilter = rti;
    _rio_rtiFilterFields = null;
  }

  private static void setupRtiFields() {
    if (null == _rio_rtiFilter)
      return;
    // we may already have done this
    if (null != _rio_rtiFilterFields)
      return;
    int _rio_i, _rio_j;
    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
      _rio_rtiFilterFields[_rio_i] = 0;
    }
    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
        .getFieldTypeInfos().iterator();
    _rio_i = 0;
    while (_rio_itFilter.hasNext()) {
      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
          .next();
      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
          .getFieldTypeInfos().iterator();
      _rio_j = 1;
      while (_rio_it.hasNext()) {
        org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
        if (_rio_tInfo.equals(_rio_tInfoFilter)) {
          _rio_rtiFilterFields[_rio_i] = _rio_j;
          break;
        }
        _rio_j++;
      }
      _rio_i++;
    }
  }

  public long getTimePartition() {
File Line
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java 90
org/apache/hadoop/chukwa/hicc/TimeHandler.java 102
      String period = map.get("period");
      this.start = now.getTimeInMillis();
      this.end = now.getTimeInMillis();
      if (period.equals("last1hr")) {
        start = end - (60 * 60 * 1000);
      } else if (period.equals("last2hr")) {
        start = end - (2 * 60 * 60 * 1000);
      } else if (period.equals("last3hr")) {
        start = end - (3 * 60 * 60 * 1000);
      } else if (period.equals("last6hr")) {
        start = end - (6 * 60 * 60 * 1000);
      } else if (period.equals("last12hr")) {
        start = end - (12 * 60 * 60 * 1000);
      } else if (period.equals("last24hr")) {
        start = end - (24 * 60 * 60 * 1000);
      } else if (period.equals("last7d")) {
        start = end - (7 * 24 * 60 * 60 * 1000);
      } else if (period.equals("last30d")) {
        start = end - (30L * 24 * 60 * 60 * 1000);
      }
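
The if/else ladder above is repeated verbatim in OfflineTimeHandler and TimeHandler (and, per the next entries, twice within OfflineTimeHandler), and the 30-day case overflows int arithmetic in the copies that drop the 30L long literal. A minimal sketch of a shared lookup table both classes could consult instead; the class name PeriodTable is hypothetical.

    import java.util.HashMap;
    import java.util.Map;

    final class PeriodTable {
      // Period keyword -> window length in milliseconds (long literals avoid int overflow).
      static final Map<String, Long> MILLIS = new HashMap<String, Long>();
      static {
        MILLIS.put("last1hr", 60L * 60 * 1000);
        MILLIS.put("last2hr", 2L * 60 * 60 * 1000);
        MILLIS.put("last3hr", 3L * 60 * 60 * 1000);
        MILLIS.put("last6hr", 6L * 60 * 60 * 1000);
        MILLIS.put("last12hr", 12L * 60 * 60 * 1000);
        MILLIS.put("last24hr", 24L * 60 * 60 * 1000);
        MILLIS.put("last7d", 7L * 24 * 60 * 60 * 1000);
        MILLIS.put("last30d", 30L * 24 * 60 * 60 * 1000);
      }
      private PeriodTable() {}
    }

    // Callers keep their existing start/end fields:
    //   Long offset = PeriodTable.MILLIS.get(period);
    //   if (offset != null) {
    //     start = end - offset;
    //   }
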
File Line
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java 60
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java 89
        && !map.get("period").equals("")) {
      String period = map.get("period");
      this.start = now.getTimeInMillis();
      this.end = now.getTimeInMillis();
      if (period.equals("last1hr")) {
        start = end - (60 * 60 * 1000);
      } else if (period.equals("last2hr")) {
        start = end - (2 * 60 * 60 * 1000);
      } else if (period.equals("last3hr")) {
        start = end - (3 * 60 * 60 * 1000);
      } else if (period.equals("last6hr")) {
        start = end - (6 * 60 * 60 * 1000);
      } else if (period.equals("last12hr")) {
        start = end - (12 * 60 * 60 * 1000);
      } else if (period.equals("last24hr")) {
        start = end - (24 * 60 * 60 * 1000);
      } else if (period.equals("last7d")) {
        start = end - (7 * 24 * 60 * 60 * 1000);
      } else if (period.equals("last30d")) {
        start = end - (30 * 24 * 60 * 60 * 1000);
File Line
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java 61
org/apache/hadoop/chukwa/hicc/TimeHandler.java 102
      String period = map.get("period");
      this.start = now.getTimeInMillis();
      this.end = now.getTimeInMillis();
      if (period.equals("last1hr")) {
        start = end - (60 * 60 * 1000);
      } else if (period.equals("last2hr")) {
        start = end - (2 * 60 * 60 * 1000);
      } else if (period.equals("last3hr")) {
        start = end - (3 * 60 * 60 * 1000);
      } else if (period.equals("last6hr")) {
        start = end - (6 * 60 * 60 * 1000);
      } else if (period.equals("last12hr")) {
        start = end - (12 * 60 * 60 * 1000);
      } else if (period.equals("last24hr")) {
        start = end - (24 * 60 * 60 * 1000);
      } else if (period.equals("last7d")) {
        start = end - (7 * 24 * 60 * 60 * 1000);
      } else if (period.equals("last30d")) {
        start = end - (30 * 24 * 60 * 60 * 1000);
File Line
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java 196
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java 184
    end_rec.unique_id = new StringBuilder().append(end_rec.state_name).append("@").append(end_rec.identifier).append("@").append(end_rec.job_id).toString();
      
    start_rec.add_info.put(Record.tagsField,val.getValue(Record.tagsField));
		start_rec.add_info.put("csource",val.getValue("csource"));
    end_rec.add_info.put(Record.tagsField,val.getValue(Record.tagsField));
		end_rec.add_info.put("csource",val.getValue("csource"));
		end_rec.add_info.put("STATE_STRING","SUCCESS"); // by default
		
		// add counter value
		end_rec.add_info.put("BYTES",val.getValue("bytes"));
		    
    String crk_mid_string_start = new StringBuilder().append(start_rec.getUniqueID()).append("_").append(start_rec.timestamp).toString();
    String crk_mid_string_end = new StringBuilder().append(end_rec.getUniqueID()).append("_").append(start_rec.timestamp).toString();
    output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_start), start_rec);
    output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_end), end_rec);
    
  }

} // end of mapper class
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/LogEntry.java 24
org/apache/hadoop/chukwa/extraction/hbase/LogEntry.java 24
public class LogEntry {
	private SimpleDateFormat sdf = new SimpleDateFormat(
			"yyyy-MM-dd HH:mm");

	private Date date;
	private String logLevel;
	private String className;
	private String body;

	public LogEntry(String recordEntry) throws ParseException {
		String dStr = recordEntry.substring(0, 23);
		date = sdf.parse(dStr);
		int start = 24;
		int idx = recordEntry.indexOf(' ', start);
		logLevel = recordEntry.substring(start, idx);
		start = idx + 1;
		idx = recordEntry.indexOf(' ', start);
		className = recordEntry.substring(start, idx - 1);
		body = recordEntry.substring(idx + 1);
	}

	public Date getDate() {
		return (Date) date.clone();
	}

	public void setDate(Date date) {
		this.date = (Date) date.clone();
	}

	public String getLogLevel() {
		return logLevel;
	}

	public String getClassName() {
		return className;
	}

	public String getBody() {
		return body;
	}
}
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 287
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 298
        }
        return (os - s);
      } catch (java.io.IOException e) {
        throw new RuntimeException(e);
      }
    }

    static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
        int l2) {
      try {
        int os1 = s1;
        {
          long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
          long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
          if (i1 != i2) {
            return ((i1 - i2) < 0) ? -1 : 0;
          }
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
        }
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 676
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 716
    } else if (this.query_stat_type.equals("avg_volume")) {
      for(int i=0;i<events.size();i++) {
        HashMap<String, Object> event = events.get(i);
        start=(Long)event.get("start_time");
        end=(Long)event.get("finish_time");
        start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
        end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
        String this_host = (String) event.get("hostname");
        String other_host = (String) event.get("other_host");
        int this_host_idx = host_indices.get(this_host).intValue();
        int other_host_idx = host_indices.get(other_host).intValue();

        long curr_val = Long.parseLong((String)event.get("bytes"));

        // to, from
        stats[other_host_idx][this_host_idx] += curr_val;
File Line
org/apache/hadoop/chukwa/hicc/rest/PieChartController.java 104
org/apache/hadoop/chukwa/hicc/rest/TileController.java 101
      Template template = velocity.getTemplate("pie.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      e.printStackTrace();
      return e.getMessage();
    }
    return sw.toString();
  }

  @PUT
  @Path("preview/series")
  @Produces("application/json")
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
    List<String> data = ChukwaHBaseStore.getData(series, startTime, endTime);
    String result = gson.toJson(data);
    return result;
  }
}
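
PieChartController already extends ChartController (see its class declaration later in this report); if TileController can do the same, one way to remove this duplication would be to keep a single previewSeries implementation on the base class and let the subclasses inherit it. A minimal sketch, assuming JAX-RS annotation inheritance and the resource routing work out for this layout; the body simply restates the duplicated code above.

      @PUT
      @Path("preview/series")
      @Produces("application/json")
      public String previewSeries(@Context HttpServletRequest request, String buffer) {
        Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
        }.getType();
        TimeHandler time = new TimeHandler(request);
        Gson gson = new Gson();
        ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
        List<String> data =
            ChukwaHBaseStore.getData(series, time.getStartTime(), time.getEndTime());
        return gson.toJson(data);
      }
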
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java 59
org/apache/hadoop/chukwa/extraction/hbase/SystemMetrics.java 48
    cal.set(Calendar.MILLISECOND, 0);
    JSONArray cpuList = (JSONArray) json.get("cpu");
    double combined = 0.0;
    double user = 0.0;
    double sys = 0.0;
    double idle = 0.0;
    int actualSize = 0;
    for(int i = 0; i< cpuList.size(); i++) {
      JSONObject cpu = (JSONObject) cpuList.get(i);
      //Work around for sigar returning null sometimes for cpu metrics on pLinux
      if(cpu.get("combined") == null){
    	  continue;
      }
      actualSize++;
      combined = combined + Double.parseDouble(cpu.get("combined").toString());
      user = user + Double.parseDouble(cpu.get("user").toString());
      sys = sys + Double.parseDouble(cpu.get("sys").toString());
      idle = idle + Double.parseDouble(cpu.get("idle").toString());
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 102
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 126
      rownumiter = orig_tab.rows(); // iterate over everything
      while (rownumiter.hasNext()) {
        origrownum = ((Integer)rownumiter.next()).intValue();
        newrownum = this.plot_tab.addRow();
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum,"hostname",orig_tab.getString(origrownum,"hostname"));
        this.plot_tab.set(newrownum,"friendly_id",orig_tab.getString(origrownum,"friendly_id"));
        this.plot_tab.set(newrownum,START_FIELD_NAME, orig_tab.getDouble(origrownum,START_FIELD_NAME));
        this.plot_tab.set(newrownum,END_FIELD_NAME, orig_tab.getDouble(origrownum,END_FIELD_NAME));
      }      
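
Every copy of this loop in Swimlanes transfers the same six columns from orig_tab to plot_tab. A minimal sketch of a row-copying helper the class could call from each loop body; the method name copyPlotRow is hypothetical, and the parameter type Table stands for whatever table type orig_tab already has in Swimlanes.

      private void copyPlotRow(Table orig_tab, int origrownum) {
        int newrownum = this.plot_tab.addRow();
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum, "hostname", orig_tab.getString(origrownum, "hostname"));
        this.plot_tab.set(newrownum, "friendly_id", orig_tab.getString(origrownum, "friendly_id"));
        this.plot_tab.set(newrownum, START_FIELD_NAME, orig_tab.getDouble(origrownum, START_FIELD_NAME));
        this.plot_tab.set(newrownum, END_FIELD_NAME, orig_tab.getDouble(origrownum, END_FIELD_NAME));
      }

      // Each copy loop then reduces to:
      //   while (rownumiter.hasNext()) {
      //     copyPlotRow(orig_tab, ((Integer) rownumiter.next()).intValue());
      //   }
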
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 311
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 332
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 233
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
              s2, i2);
          if (r1 != 0) {
            return (r1 < 0) ? -1 : 0;
          }
          s1 += i1;
          s2 += i2;
          l1 -= i1;
          l1 -= i2;
        }
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 310
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 251
        }
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
              s2, i2);
          if (r1 != 0) {
            return (r1 < 0) ? -1 : 0;
          }
          s1 += i1;
          s2 += i2;
          l1 -= i1;
          l1 -= i2;
        }
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 330
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 332
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 233
        {
          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
              s2, i2);
          if (r1 != 0) {
            return (r1 < 0) ? -1 : 0;
          }
          s1 += i1;
          s2 += i2;
          l1 -= i1;
          l1 -= i2;
        }
        {
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 653
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 676
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 716
    } else if (this.query_stat_type.equals("avg_duration")) {
      for(int i=0;i<events.size();i++) {
        HashMap<String, Object> event = events.get(i);
        start=(Long)event.get("start_time");
        end=(Long)event.get("finish_time");
        start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
        end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
        String this_host = (String) event.get("hostname");
        String other_host = (String) event.get("other_host");
        int this_host_idx = host_indices.get(this_host).intValue();
        int other_host_idx = host_indices.get(other_host).intValue();

        long curr_val = end_millis - start_millis + ((end - start)*1000);
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/DatanodeProcessor.java 117
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HBaseMasterProcessor.java 67
        log.warn("timeStamp tag not set in JMX adaptor for datanode");
      } else {
        timeStamp = Long.parseLong(ttTag);
      }
      @SuppressWarnings("unchecked")
      Iterator<Map.Entry<String, ?>> keys = obj.entrySet().iterator();

      while (keys.hasNext()) {
        Map.Entry<String, ?> entry = keys.next();
        String key = entry.getKey();
        Object value = entry.getValue();
        String valueString = value == null ? "" : value.toString();

        // Calculate rate for some of the metrics
        if (rateMap.containsKey(key)) {
          long oldValue = rateMap.get(key);
          long curValue = Long.parseLong(valueString);
          rateMap.put(key, curValue);
          long newValue = curValue - oldValue;
          if (newValue < 0) {
            log.error("DatanodeProcessor's rateMap might be reset or corrupted for metric "
File Line
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 332
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 233
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 252
            {
              int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
              int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
              int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
              int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
              s1 += z1;
              s2 += z2;
              l1 -= z1;
              l2 -= z2;
              int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1,
                  b2, s2, i2);
              if (r1 != 0) {
                return (r1 < 0) ? -1 : 0;
              }
              s1 += i1;
              s2 += i2;
              l1 -= i1;
              l1 -= i2;
            }
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 638
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 653
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 676
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 699
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 716
    if (this.query_stat_type.equals("transaction_count")) {
      for(int i=0;i<events.size();i++) {
        HashMap<String, Object> event = events.get(i);
        start=(Long)event.get("start_time");
        end=(Long)event.get("finish_time");
        start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
        end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
        String this_host = (String) event.get("hostname");
        String other_host = (String) event.get("other_host");
        int this_host_idx = host_indices.get(this_host).intValue();
        int other_host_idx = host_indices.get(other_host).intValue();
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 125
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 178
          "OR [state_name] == 'shuffle_remote'")
      );
      
      while (rownumiter.hasNext()) {
        origrownum = ((Integer)rownumiter.next()).intValue();
        newrownum = this.plot_tab.addRow();
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum,"hostname",orig_tab.getString(origrownum,"hostname"));
        this.plot_tab.set(newrownum,"friendly_id",orig_tab.getString(origrownum,"friendly_id"));
        this.plot_tab.set(newrownum,START_FIELD_NAME, orig_tab.getDouble(origrownum,START_FIELD_NAME));
        this.plot_tab.set(newrownum,END_FIELD_NAME, orig_tab.getDouble(origrownum,END_FIELD_NAME));
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 102
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 179
      rownumiter = orig_tab.rows(); // iterate over everything
      while (rownumiter.hasNext()) {
        origrownum = ((Integer)rownumiter.next()).intValue();
        newrownum = this.plot_tab.addRow();
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum,"hostname",orig_tab.getString(origrownum,"hostname"));
        this.plot_tab.set(newrownum,"friendly_id",orig_tab.getString(origrownum,"friendly_id"));
        this.plot_tab.set(newrownum,START_FIELD_NAME, orig_tab.getDouble(origrownum,START_FIELD_NAME));
        this.plot_tab.set(newrownum,END_FIELD_NAME, orig_tab.getDouble(origrownum,END_FIELD_NAME));
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 335
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 315
          record.add(entry.getKey(), entry.getValue());
        }

        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null, Long
            .parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));
        record.add("TASK_TYPE", keys.get("TASK_TYPE"));
File Line
org/apache/hadoop/chukwa/hicc/rest/ChartController.java 159
org/apache/hadoop/chukwa/hicc/rest/CirclesController.java 102
org/apache/hadoop/chukwa/hicc/rest/PieChartController.java 104
org/apache/hadoop/chukwa/hicc/rest/TileController.java 101
      Template template = velocity.getTemplate("chart.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      e.printStackTrace();
      return e.getMessage();
    }
    return sw.toString();
  }

  @PUT
  @Path("preview/series")
  @Produces("application/json")
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
File Line
org/apache/hadoop/chukwa/hicc/rest/CirclesController.java 80
org/apache/hadoop/chukwa/hicc/rest/TileController.java 75
      Template template = velocity.getTemplate("circles.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      e.printStackTrace();
      return e.getMessage();
    }
    return sw.toString();
  }

  @PUT
  @Path("preview")
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("circles.vm");
File Line
org/apache/hadoop/chukwa/database/DatabaseConfig.java 56
org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java 61
    if (System.getenv("CHUKWA_CONF_DIR") != null) {
      // Allow site-specific MDL files to be included in the 
      // configuration so as to keep the "main" mdl.xml pure.
      File confDir = new File(System.getenv("CHUKWA_CONF_DIR"));
      File[] confFiles = confDir.listFiles(new FilenameFilter() {

        @Override
        public boolean accept(File dir, String name) {
          // Implements a naming convention of ending with "mdl.xml"
          // but is careful not to pick up mdl.xml itself again.
          return name.endsWith(MDL_XML) && !name.equals(MDL_XML);
        }

      });

      if (confFiles != null) {
        for (File confFile : confFiles) 
          config.addResource(new Path(confFile.getAbsolutePath()));
      }
    }
  }

  public String get(String key) {
    return config.get(key);
  }

  public void put(String key, String value) {
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java 62
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java 48
  public HadoopMetricsProcessor() {
    // TODO move that to config
    sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
  }

  @SuppressWarnings("unchecked")
  @Override
  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    try {
      // Look for syslog PRI, if PRI is not found, start from offset of 0.
      int idx = recordEntry.indexOf('>', 0);  
      String dStr = recordEntry.substring(idx+1, idx+23);
      int start = idx + 25;
      idx = recordEntry.indexOf(' ', start);
      // String level = recordEntry.substring(start, idx);
      start = idx + 1;
      idx = recordEntry.indexOf(' ', start);
      // String className = recordEntry.substring(start, idx-1);
      String body = recordEntry.substring(idx + 1);
      body = body.replaceAll("\n", "");
      // log.info("record [" + recordEntry + "] body [" + body +"]");
      Date d = sdf.parse(dStr);
File Line
org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java 186
org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java 211
    info.setId(adaptorId);
    info.setDataType(adaptor.getType());
    info.setAdaptorClass(adaptor.getClass().getName());
    String[] status = adaptor.getCurrentStatus().split(" ",2);
    info.setAdaptorParams(status[1]);
    List<AdaptorAveragedRate> rates = new ArrayList<AdaptorAveragedRate>();
    rates.add(new AdaptorAveragedRate(60, adaptorStats.calcAverageRate(adaptor,  60)));
    rates.add(new AdaptorAveragedRate(300, adaptorStats.calcAverageRate(adaptor,  300)));
    rates.add(new AdaptorAveragedRate(600, adaptorStats.calcAverageRate(adaptor,  600)));
    info.setAdaptorRates(rates);
File Line
org/apache/hadoop/chukwa/hicc/rest/ChartController.java 52
org/apache/hadoop/chukwa/hicc/rest/TileController.java 48
public class ChartController {
  static Logger LOG = Logger.getLogger(ChartController.class);
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;
  
  /**
   * Render chart using flot.js
   * 
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return html chart widget
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);

      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("chart.vm");
File Line
org/apache/hadoop/chukwa/hicc/rest/CirclesController.java 80
org/apache/hadoop/chukwa/hicc/rest/PieChartController.java 77
org/apache/hadoop/chukwa/hicc/rest/TileController.java 75
      Template template = velocity.getTemplate("circles.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      e.printStackTrace();
      return e.getMessage();
    }
    return sw.toString();
  }

  @PUT
  @Path("preview")
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 104
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 129
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 153
        origrownum = ((Integer)rownumiter.next()).intValue();
        newrownum = this.plot_tab.addRow();
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum,"hostname",orig_tab.getString(origrownum,"hostname"));
        this.plot_tab.set(newrownum,"friendly_id",orig_tab.getString(origrownum,"friendly_id"));
        this.plot_tab.set(newrownum,START_FIELD_NAME, orig_tab.getDouble(origrownum,START_FIELD_NAME));
        this.plot_tab.set(newrownum,END_FIELD_NAME, orig_tab.getDouble(origrownum,END_FIELD_NAME));
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 377
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 357
        record.add(Record.tagsField, chunk.getTags());
        // log.info("MR_Graph +1");
        output.collect(key, record);

      }
    } catch (IOException e) {
      log.warn("Unable to collect output in JobLogHistoryProcessor ["
          + recordEntry + "]", e);
      e.printStackTrace();
      throw e;
    }

  }

  protected void extractCounters(ChukwaRecord record, String input) {

    String[] data = null;
    String[] counters = input.split(",");

    for (String counter : counters) {
      data = counter.split(":");
      record.add(data[0].replaceAll(" ", "_").replaceAll("\\.", "_")
          .toUpperCase(), data[1]);
    }
  }

  public String getDataType() {
    return JobLogHistoryProcessor.recordType;
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java 226
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 540
  }

  /**
   * Set dimensions of image to be generated
   * Call before calling @see #run
   * @param width 
   * @param height 
   */
  public void setDimensions(int width, int height) {
    this.SIZE_X=width;
    this.SIZE_Y=height;
  }
  
  /**
   * Specify whether to print labels of hosts along axes
   * Call before calling @see #run
   * @param legendopt 
   */
  public void setLegend(boolean legendopt) {
    if (legendopt) {
      this.plot_legend = true;
    } else {
      this.plot_legend = false;
    }
  }
  
  
  /**
   * Generates image in specified format, and writes image as binary
   * output to supplied output stream 
   * @param output 
   * @param img_fmt 
   * @param scale 
   * @return 
   */
  public boolean getImage(java.io.OutputStream output, String img_fmt, double scale) {
    dis = new Display(this.viz);
    dis.setSize(SIZE_X,SIZE_Y);
    dis.setHighQuality(true);
    dis.setFont(new Font(Font.SANS_SERIF,Font.PLAIN,24));
    return dis.saveImage(output, img_fmt, scale);
  } 
  
  protected void setupRenderer() {
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 70
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 74
      String body = recordEntry.substring(firstSep);

      internalMatcher.reset(body);

      // String fieldName = null;
      // String fieldValue = null;

      while (internalMatcher.matches()) {

        keys.put(internalMatcher.group(1).trim(), internalMatcher.group(2)
            .trim());

        // TODO Remove debug info before production
        // fieldName = internalMatcher.group(1).trim();
        // fieldValue = internalMatcher.group(2).trim();
        // log.info("JobLogHistoryProcessor Add field: [" + fieldName +
        // "][" + fieldValue +"]" );
        // log.info("EOL : [" + internalMatcher.group(3) + "]" );
        internalMatcher.reset(internalMatcher.group(3));
      }

      if (!keys.containsKey("JOBID")) {
        // Extract JobID from taskID
        // JOBID = "job_200804210403_0005"
        // TASKID = "tip_200804210403_0005_m_000018"
        String jobId = keys.get("TASKID");
        int idx1 = jobId.indexOf('_', 0);
        int idx2 = jobId.indexOf('_', idx1 + 1);
        idx2 = jobId.indexOf('_', idx2 + 1);
        keys.put("JOBID", jobId.substring(idx1 + 1, idx2));
File Line
org/apache/hadoop/chukwa/hicc/rest/PieChartController.java 49
org/apache/hadoop/chukwa/hicc/rest/TileController.java 48
public class PieChartController extends ChartController{
  static Logger LOG = Logger.getLogger(ChartController.class);
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;

  /**
   * Render pie chart using chartist.js
   * 
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return html chart widget
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);

      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
File Line
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java 98
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java 97
    end_rec.fsm_type = new FSMType(FSMType.FILESYSTEM_FSM);
    end_rec.state_type = new StateType(StateType.STATE_END);    
        
    /* extract addresses */
    Matcher src_regex = ipPattern.matcher(val.getValue("src"));
    if (src_regex.matches()) {
      src_add = src_regex.group(1);
    } else {
      log.warn("Failed to match src IP:"+val.getValue("src")+"");
      src_add = "";
    }
    Matcher dest_regex = ipPattern.matcher(val.getValue("dest"));
    if (dest_regex.matches()) {
      dest_add = dest_regex.group(1);
    } else {
      log.warn("Failed to match dest IP:"+val.getValue("dest")+"");
      dest_add = "";
    }
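
The src/dest address extraction above is identical in DataNodeClientTraceMapper and TaskTrackerClientTraceMapper apart from the field being matched. A minimal sketch of a helper either mapper could use; the method name extractAddress and the label parameter are hypothetical, while Pattern and Matcher are the java.util.regex types already used in these classes.

      private String extractAddress(Pattern ipPattern, String value, String label) {
        Matcher m = ipPattern.matcher(value);
        if (m.matches()) {
          return m.group(1);
        }
        log.warn("Failed to match " + label + " IP:" + value);
        return "";
      }

      // Usage:
      //   src_add = extractAddress(ipPattern, val.getValue("src"), "src");
      //   dest_add = extractAddress(ipPattern, val.getValue("dest"), "dest");
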
File Line
org/apache/hadoop/chukwa/hicc/ClusterConfig.java 31
org/apache/hadoop/chukwa/util/ClusterConfig.java 30
  static public String getContents(File aFile) {
    // ...checks on aFile are elided
    StringBuffer contents = new StringBuffer();

    try {
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        /*
         * readLine is a bit quirky : it returns the content of a line MINUS the
         * newline. it returns null only for the END of the stream. it returns
         * an empty String if two newlines appear in a row.
         */
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }

    return contents.toString();
  }

  public ClusterConfig() {
File Line
org/apache/hadoop/chukwa/hicc/rest/ChartController.java 52
org/apache/hadoop/chukwa/hicc/rest/PieChartController.java 49
public class ChartController {
  static Logger LOG = Logger.getLogger(ChartController.class);
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;
  
  /**
   * Render chart using flot.js
   * 
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return html chart widget
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);

      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 169
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 136
        seqId = _rio_a.readLong("seqId");
      } else {
        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
            .getFieldTypeInfos());
        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
      }
    }
    _rio_a.endRecord(_rio_tag);
  }

  public int compareTo(final Object _rio_peer_) throws ClassCastException {
    if (!(_rio_peer_ instanceof ChukwaArchiveKey)) {
File Line
org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java 235
org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java 349
    Set<String> pk = new HashSet<String>();
    try {
      getHBaseConnection();
      Table table = connection.getTable(TableName.valueOf(CHUKWA_META));
      Scan scan = new Scan();
      scan.addFamily(KEY_NAMES);
      ResultScanner rs = table.getScanner(scan);
      Iterator<Result> it = rs.iterator();
      while (it.hasNext()) {
        Result result = it.next();
        for (Cell cell : result.rawCells()) {
          JSONObject json = (JSONObject) JSONValue.parse(new String(CellUtil.cloneValue(cell), UTF8));
          if (json!=null && json.get("type")!=null && json.get("type").equals("source")) {
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 170
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 183
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 137
      } else {
        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
            .getFieldTypeInfos());
        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
      }
    }
    _rio_a.endRecord(_rio_tag);
  }

  public int compareTo(final Object _rio_peer_) throws ClassCastException {
    if (!(_rio_peer_ instanceof ChukwaArchiveKey)) {
File Line
org/apache/hadoop/chukwa/database/DatabaseConfig.java 163
org/apache/hadoop/chukwa/database/DatabaseConfig.java 252
          partitionSize = 100 * CENTURY;
        }
        currentPartition = now / partitionSize;
        startPartition = start / partitionSize;
        endPartition = end / partitionSize;
      } else {
        fallback = false;
      }
    }

    if (startPartition != endPartition) {
      int delta = (int) (endPartition - startPartition);
      tableNames = new String[delta + 1];
      for (int i = 0; i <= delta; i++) {
        long partition = startPartition + (long) i;
        tableNames[i] = tableName + "_" + partition + tableType;
      }
    } else {
      tableNames = new String[1];
      tableNames[0] = tableName + "_" + startPartition + tableType;
    }
    return tableNames;
  }

  public String[] findTableNameForCharts(String tableName, long start, long end) {
File Line
org/apache/hadoop/chukwa/hicc/ClusterConfig.java 31
org/apache/hadoop/chukwa/hicc/Views.java 39
org/apache/hadoop/chukwa/hicc/Workspace.java 84
org/apache/hadoop/chukwa/util/ClusterConfig.java 30
  static public String getContents(File aFile) {
    // ...checks on aFile are elided
    StringBuffer contents = new StringBuffer();

    try {
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        /*
         * readLine is a bit quirky : it returns the content of a line MINUS the
         * newline. it returns null only for the END of the stream. it returns
         * an empty String if two newlines appear in a row.
         */
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }

    return contents.toString();
  }

  public ClusterConfig() {
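
This getContents helper is duplicated across Workspace, Views, both ClusterConfig classes and (per later entries) Aggregator. A minimal sketch of a single shared utility they could all delegate to; the class name FileContents and its package placement are hypothetical, and try-with-resources replaces the hand-written finally block while keeping the same behaviour (whole file read as UTF-8, line separators re-appended).

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.Charset;

    public final class FileContents {
      private FileContents() {}

      public static String getContents(File aFile) {
        StringBuilder contents = new StringBuilder();
        try (BufferedReader input = new BufferedReader(new InputStreamReader(
            new FileInputStream(aFile), Charset.forName("UTF-8")))) {
          String line;
          while ((line = input.readLine()) != null) {
            contents.append(line);
            contents.append(System.getProperty("line.separator"));
          }
        } catch (IOException ex) {
          ex.printStackTrace();
        }
        return contents.toString();
      }
    }
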
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 153
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 181
        String curr_reduce = orig_tab.getString(origrownum, "friendly_id");
        newrownum = this.plot_tab.addRow();
        
        this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
        this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
        this.plot_tab.set(newrownum,"hostname",orig_tab.getString(origrownum,"hostname"));
        this.plot_tab.set(newrownum,"friendly_id",orig_tab.getString(origrownum,"friendly_id"));        
        this.plot_tab.set(newrownum,START_FIELD_NAME, orig_tab.getDouble(origrownum,START_FIELD_NAME));
        this.plot_tab.set(newrownum,END_FIELD_NAME, orig_tab.getDouble(origrownum,END_FIELD_NAME));
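The column-by-column copy above appears at both Swimlanes locations. A sketch of a private helper that could hold it (copyStateRow is a hypothetical name; orig_tab is assumed to be a prefuse.data.Table, which is what the accessors used above suggest):

  // Hypothetical private helper for Swimlanes; copies one state row from
  // the source table into plot_tab and returns the new row number.
  private int copyStateRow(prefuse.data.Table orig_tab, int origrownum) {
    int newrownum = this.plot_tab.addRow();
    this.plot_tab.set(newrownum, "state_name", orig_tab.getString(origrownum, "state_name"));
    this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
    this.plot_tab.set(newrownum, "hostname", orig_tab.getString(origrownum, "hostname"));
    this.plot_tab.set(newrownum, "friendly_id", orig_tab.getString(origrownum, "friendly_id"));
    this.plot_tab.set(newrownum, START_FIELD_NAME, orig_tab.getDouble(origrownum, START_FIELD_NAME));
    this.plot_tab.set(newrownum, END_FIELD_NAME, orig_tab.getDouble(origrownum, END_FIELD_NAME));
    return newrownum;
  }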
File Line
org/apache/hadoop/chukwa/database/Aggregator.java 52
org/apache/hadoop/chukwa/hicc/Workspace.java 84
  public static String getContents(File aFile) {
    StringBuffer contents = new StringBuffer();
    try {
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  public void process(long start, long end, String query) throws Throwable {
File Line
org/apache/hadoop/chukwa/hicc/DatasetMapper.java 185
org/apache/hadoop/chukwa/util/DatabaseWriter.java 148
    } finally {
      // it is a good idea to release
      // resources in a finally{} block
      // in reverse-order of their creation
      // if they are no-longer needed
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        rs = null;
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        stmt = null;
      }
      if (conn != null) {
        try {
          conn.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        conn = null;
      }
    }
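DatasetMapper and DatabaseWriter carry the same close-in-reverse-order finally block. A sketch of a small helper that keeps that behavior in one place (JdbcCloser is a hypothetical name; the duplicated code only logs close-time failures, so the helper tolerates them and leaves any logging to the caller):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical helper; closes JDBC resources in reverse order of creation,
// ignoring failures on close, as the duplicated finally blocks effectively do.
public final class JdbcCloser {
  private JdbcCloser() {}

  public static void closeQuietly(ResultSet rs, Statement stmt, Connection conn) {
    if (rs != null) {
      try { rs.close(); } catch (SQLException ignored) { }
    }
    if (stmt != null) {
      try { stmt.close(); } catch (SQLException ignored) { }
    }
    if (conn != null) {
      try { conn.close(); } catch (SQLException ignored) { }
    }
  }
}

On Java 7 or later the same effect comes from try-with-resources, which removes the finally block entirely.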
File Line
org/apache/hadoop/chukwa/database/Aggregator.java 52
org/apache/hadoop/chukwa/hicc/ClusterConfig.java 31
org/apache/hadoop/chukwa/hicc/Views.java 39
org/apache/hadoop/chukwa/util/ClusterConfig.java 30
  public static String getContents(File aFile) {
    StringBuffer contents = new StringBuffer();
    try {
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  public void process(long start, long end, String query) throws Throwable {
File Line
org/apache/hadoop/chukwa/rest/resource/ViewContextResolver.java 49
org/apache/hadoop/chukwa/rest/resource/WidgetContextResolver.java 46
      public ViewContextResolver() throws Exception {
          Map props = new HashMap<String, Object>();
          props.put(JSONJAXBContext.JSON_NOTATION, JSONJAXBContext.JSONNotation.MAPPED);
          props.put(JSONJAXBContext.JSON_ROOT_UNWRAPPING, Boolean.TRUE);
          props.put(JSONJAXBContext.JSON_ARRAYS, jsonArray);
          this.types = new HashSet<Class<?>>(Arrays.asList(classTypes));
          this.context = new JSONJAXBContext(classTypes, props);
      }

      public JAXBContext getContext(Class<?> objectType) {
          return (types.contains(objectType)) ? context : null;
      }

//    private final JAXBContext context;
//
//    public ViewContextResolver() throws Exception {
//      this.context = new JSONJAXBContext(JSONConfiguration.natural().build(), "package.of.your.model");
//  }
//
//  public JAXBContext getContext(Class<?> objectType) {
//      return context;
//  }

  }
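ViewContextResolver and WidgetContextResolver differ only in the model classes they register. A sketch of a base class both could extend (JsonMappedContextResolver is a hypothetical name; the Jersey types are the ones the code above already uses, and the jsonArray parameter keeps whatever type the existing field has):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.ext.ContextResolver;
import javax.xml.bind.JAXBContext;
import com.sun.jersey.api.json.JSONJAXBContext;

// Hypothetical shared base class; each resolver subclass supplies only its
// own model classes and JSON array names.
public abstract class JsonMappedContextResolver implements ContextResolver<JAXBContext> {
  private final JAXBContext context;
  private final Set<Class<?>> types;

  protected JsonMappedContextResolver(Class<?>[] classTypes, Object jsonArray)
      throws Exception {
    Map<String, Object> props = new HashMap<String, Object>();
    props.put(JSONJAXBContext.JSON_NOTATION, JSONJAXBContext.JSONNotation.MAPPED);
    props.put(JSONJAXBContext.JSON_ROOT_UNWRAPPING, Boolean.TRUE);
    props.put(JSONJAXBContext.JSON_ARRAYS, jsonArray);
    this.types = new HashSet<Class<?>>(Arrays.asList(classTypes));
    this.context = new JSONJAXBContext(classTypes, props);
  }

  public JAXBContext getContext(Class<?> objectType) {
    return types.contains(objectType) ? context : null;
  }
}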
File Line
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java 50
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java 51
    Pattern.compile(".*[a-zA-Z\\-_:\\/]([0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+)[a-zA-Z0-9\\-_:\\/].*");

  public void map
    (ChukwaRecordKey key, ChukwaRecord val,
     OutputCollector<ChukwaRecordKey, FSMIntermedEntry> output,
     Reporter reporter)
    throws IOException
  {

    /* Extract field names for checking */
    String [] fieldNames = val.getFields();
    ArrayList<String> fieldNamesList = new ArrayList<String>(fieldNames.length);
    for (int i = 0; i < fieldNames.length; i++) {
      fieldNamesList.add(fieldNames[i]);
    }

    // Handle ClientTraceDetailed and DataNodeLog entries separately
    // because we need to combine both types of entries for a complete picture

    if (key.getReduceType().equals("ClientTraceDetailed")) {
      assert(fieldNamesList.contains("op"));
      if (val.getValue("op").startsWith("HDFS")) {
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 298
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 349
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 309
        {
          long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
          long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
          if (i1 != i2) {
            return ((i1 - i2) < 0) ? -1 : 0;
          }
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
        }
File Line
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java 150
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java 151
    start_rec.time_orig_epoch = k[0];
    start_rec.time_orig = Long.toString(actual_time_ms); // not actually used
    start_rec.timestamp = Long.toString(actual_time_ms);
    start_rec.time_end = "";
    start_rec.time_start = start_rec.timestamp;
    
    end_rec.time_orig_epoch = k[0];
    end_rec.time_orig = val.getValue("actual_time");
    end_rec.timestamp = val.getValue("actual_time");
    end_rec.time_end = val.getValue("actual_time");
    end_rec.time_start = "";
    
    log.debug("Duration: " + (Long.parseLong(end_rec.time_end) - Long.parseLong(start_rec.time_start)));
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 361
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 367
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 270
        }
        return (os1 - s1);
      } catch (java.io.IOException e) {
        throw new RuntimeException(e);
      }
    }

    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      int ret = compareRaw(b1, s1, l1, b2, s2, l2);
      return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
    }
  }

  static {
    org.apache.hadoop.record.RecordComparator.define(ChukwaArchiveKey.class,
File Line
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 156
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java 198
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 158
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java 200
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
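All four occurrences listed for this entry build a key/record pair from the parsed keys map. A sketch of a private helper both processors could call, parameterized in case the occurrences differ only in the key prefix and time field (emitJobLogRecord is a hypothetical name; the Chukwa types are the ones already shown):

  // Hypothetical private helper; mirrors the duplicated key/record
  // construction for one history event.
  private void emitJobLogRecord(String keyPrefix, String timeField,
      Map<String, String> keys, Chunk chunk,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output) throws IOException {
    ChukwaRecordKey key = new ChukwaRecordKey();
    key.setKey(keyPrefix + keys.get("JOBID") + "/" + keys.get(timeField));
    key.setReduceType("JobLogHistoryReduceProcessor");
    ChukwaRecord record = new ChukwaRecord();
    record.setTime(Long.parseLong(keys.get(timeField)));
    record.add("JOBID", keys.get("JOBID"));
    record.add(timeField, keys.get(timeField));
    record.add(Record.tagsField, chunk.getTags());
    output.collect(key, record);
  }

The block above would then reduce to emitJobLogRecord("JobLogHist/Map/", "START_TIME", keys, chunk, output).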
File Line
org/apache/hadoop/chukwa/ChukwaArchiveKey.java 270
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java 211
        {
          int i = org.apache.hadoop.record.Utils.readVInt(b, s);
          int z = org.apache.hadoop.record.Utils.getVIntSize(i);
          s += (z + i);
          l -= (z + i);
        }
        {
          int i = org.apache.hadoop.record.Utils.readVInt(b, s);
          int z = org.apache.hadoop.record.Utils.getVIntSize(i);
          s += (z + i);
          l -= (z + i);
        }
File Line
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 250
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java 274
        if (this.collate_reduces && curr_state.equals("reduce_shufflewait")) {
          this.plot_tab.setFloat(rownum,"ycoord",(float)counter);
          ArrayList<Tuple> alt = this.reducepart_hash.get(this.plot_tab.getString(rownum,"friendly_id"));
          Object [] tarr = alt.toArray();
          for (int i = 0; i < tarr.length; i++) ((Tuple)tarr[i]).setFloat("ycoord",(float)counter);
          counter++;   
        } else if (!curr_state.equals("reduce_sort") && !curr_state.equals("reduce_reducer")) {
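Both Swimlanes occurrences give a shuffle-wait row and every tuple collated under its friendly_id the same y coordinate. A sketch of a private helper for that step (assignReduceYCoord is a hypothetical name):

  // Hypothetical private helper for Swimlanes; applies one y coordinate to
  // the row and to all tuples collated under its friendly_id.
  private void assignReduceYCoord(int rownum, float ycoord) {
    this.plot_tab.setFloat(rownum, "ycoord", ycoord);
    ArrayList<Tuple> alt =
        this.reducepart_hash.get(this.plot_tab.getString(rownum, "friendly_id"));
    for (Tuple t : alt) {
      t.setFloat("ycoord", ycoord);
    }
  }

The duplicated branch then becomes assignReduceYCoord(rownum, (float) counter); counter++;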
File Line
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 142
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java 170
    {
      org.apache.hadoop.record.Index _rio_midx1 = _rio_a.startMap("mapFields");
      mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
      for (; !_rio_midx1.done(); _rio_midx1.incr()) {
        String _rio_k1;
        _rio_k1 = _rio_a.readString("_rio_k1");
        org.apache.hadoop.record.Buffer _rio_v1;
        _rio_v1 = _rio_a.readBuffer("_rio_v1");
        mapFields.put(_rio_k1, _rio_v1);
      }
      _rio_a.endMap("mapFields");
    }
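The ChukwaArchiveKey, ChukwaRecordJT and ChukwaRecordKey entries in this report point into classes emitted by the Hadoop record compiler, so their duplication is a property of the generator rather than of hand-written code. Such files are normally excluded from the scan instead of being edited; where the CPD version in use supports Java annotation-based suppression, markers like the following can also fence off a generated or intentionally duplicated region (shown purely as an illustration, not as existing Chukwa code):

  // Illustration only: CPD skips everything between declarations annotated
  // with CPD-START and CPD-END, if the running CPD version supports it.
  public class GeneratedRecordExample {

    @SuppressWarnings("CPD-START")
    private int beginMarker;

    // ... generated serialization and comparison code ...

    @SuppressWarnings("CPD-END")
    private int endMarker;
  }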