chukwa-dev mailing list archives

From: ey...@apache.org
Subject: [2/3] chukwa git commit: CHUKWA-802. Updated Javadoc for Java 8 support. (Eric Yang)
Date: Sat, 02 Apr 2016 23:14:40 GMT
http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/ChukwaChunk.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/ChukwaChunk.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/ChukwaChunk.java
index 61c61f0..cde6d90 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/ChukwaChunk.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/ChukwaChunk.java
@@ -20,21 +20,29 @@
  * 
  * DO NOT EDIT DIRECTLY
  */
-package org.apache.hadoop.chukwa.datacollection.writer.gora;  
-/** Chukwa Adaptors emit data in Chunks. A Chunk is a sequence of bytes, with some metadata. Several of these are set automatically by the Agent or Adaptors. Two of them require user intervention: cluster name and datatype. Cluster name is specified in conf/chukwa-agent-conf.xml, and is global to each Agent process. Datatype describes the expected format of the data collected by an Adaptor instance, and it is specified when that instance is started.  */
+package org.apache.hadoop.chukwa.datacollection.writer.gora;
+
+/**
+ * Chukwa Adaptors emit data in Chunks. A Chunk is a sequence of bytes, with
+ * some metadata. Several of these are set automatically by the Agent or
+ * Adaptors. Two of them require user intervention: cluster name and datatype.
+ * Cluster name is specified in conf/chukwa-agent-conf.xml, and is global to
+ * each Agent process. Datatype describes the expected format of the data
+ * collected by an Adaptor instance, and it is specified when that instance is
+ * started.
+ */
 @SuppressWarnings("all")
-public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase implements org.apache.avro.specific.SpecificRecord, org.apache.gora.persistency.Persistent {
-  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ChukwaChunk\",\"namespace\":\"org.apache.hadoop.chukwa.datacollection.writer.gora\",\"doc\":\"Chukwa Adaptors emit data in Chunks. A Chunk is a sequence of bytes, with some metadata. Several of these are set automatically by the Agent or Adaptors. Two of them require user intervention: cluster name and datatype. Cluster name is specified in conf/chukwa-agent-conf.xml, and is global to each Agent process. Datatype describes the expected format of the data collected by an Adaptor instance, and it is specified when that instance is started. \",\"fields\":[{\"name\":\"source\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"tags\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"datatype\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"sequenceID\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"name\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"data\",\"type\":[\"null\",\"bytes\"],\"default\":null}]}");
+public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
+    implements org.apache.avro.specific.SpecificRecord,
+    org.apache.gora.persistency.Persistent {
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser()
+      .parse(
+          "{\"type\":\"record\",\"name\":\"ChukwaChunk\",\"namespace\":\"org.apache.hadoop.chukwa.datacollection.writer.gora\",\"doc\":\"Chukwa Adaptors emit data in Chunks. A Chunk is a sequence of bytes, with some metadata. Several of these are set automatically by the Agent or Adaptors. Two of them require user intervention: cluster name and datatype. Cluster name is specified in conf/chukwa-agent-conf.xml, and is global to each Agent process. Datatype describes the expected format of the data collected by an Adaptor instance, and it is specified when that instance is started. \",\"fields\":[{\"name\":\"source\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"tags\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"datatype\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"sequenceID\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"name\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"data\",\"type\":[\"null\",\"bytes
 \"],\"default\":null}]}");
 
   /** Enum containing all data bean's fields. */
   public static enum Field {
-    SOURCE(0, "source"),
-    TAGS(1, "tags"),
-    DATATYPE(2, "datatype"),
-    SEQUENCE_ID(3, "sequenceID"),
-    NAME(4, "name"),
-    DATA(5, "data"),
-    ;
+    SOURCE(0, "source"), TAGS(1, "tags"), DATATYPE(2, "datatype"), SEQUENCE_ID(
+        3, "sequenceID"), NAME(4, "name"), DATA(5, "data"),;
     /**
      * Field's index.
      */
@@ -47,41 +55,51 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
     /**
      * Field's constructor
-     * @param index field's index.
-     * @param name field's name.
+     * 
+     * @param index
+     *          field's index.
+     * @param name
+     *          field's name.
      */
-    Field(int index, String name) {this.index=index;this.name=name;}
+    Field(int index, String name) {
+      this.index = index;
+      this.name = name;
+    }
 
     /**
      * Gets field's index.
+     * 
      * @return int field's index.
      */
-    public int getIndex() {return index;}
+    public int getIndex() {
+      return index;
+    }
 
     /**
      * Gets field's name.
+     * 
      * @return String field's name.
      */
-    public String getName() {return name;}
+    public String getName() {
+      return name;
+    }
 
     /**
      * Gets field's attributes to string.
+     * 
      * @return String field's attributes to string.
      */
-    public String toString() {return name;}
+    public String toString() {
+      return name;
+    }
   };
 
-  public static final String[] _ALL_FIELDS = {
-  "source",
-  "tags",
-  "datatype",
-  "sequenceID",
-  "name",
-  "data",
-  };
+  public static final String[] _ALL_FIELDS = { "source", "tags", "datatype",
+      "sequenceID", "name", "data", };
 
   /**
    * Gets the total field count.
+   * 
    * @return int field count
    */
   public int getFieldsCount() {
@@ -94,36 +112,62 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
   private java.lang.Long sequenceID;
   private java.lang.CharSequence name;
   private java.nio.ByteBuffer data;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter.  Applications should not call. 
+
+  public org.apache.avro.Schema getSchema() {
+    return SCHEMA$;
+  }
+
+  // Used by DatumWriter. Applications should not call.
   public java.lang.Object get(int field$) {
     switch (field$) {
-    case 0: return source;
-    case 1: return tags;
-    case 2: return datatype;
-    case 3: return sequenceID;
-    case 4: return name;
-    case 5: return data;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    case 0:
+      return source;
+    case 1:
+      return tags;
+    case 2:
+      return datatype;
+    case 3:
+      return sequenceID;
+    case 4:
+      return name;
+    case 5:
+      return data;
+    default:
+      throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
-  
-  // Used by DatumReader.  Applications should not call. 
-  @SuppressWarnings(value="unchecked")
+
+  // Used by DatumReader. Applications should not call.
+  @SuppressWarnings(value = "unchecked")
   public void put(int field$, java.lang.Object value) {
     switch (field$) {
-    case 0: source = (java.lang.CharSequence)(value); break;
-    case 1: tags = (java.lang.CharSequence)(value); break;
-    case 2: datatype = (java.lang.CharSequence)(value); break;
-    case 3: sequenceID = (java.lang.Long)(value); break;
-    case 4: name = (java.lang.CharSequence)(value); break;
-    case 5: data = (java.nio.ByteBuffer)(value); break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    case 0:
+      source = (java.lang.CharSequence) (value);
+      break;
+    case 1:
+      tags = (java.lang.CharSequence) (value);
+      break;
+    case 2:
+      datatype = (java.lang.CharSequence) (value);
+      break;
+    case 3:
+      sequenceID = (java.lang.Long) (value);
+      break;
+    case 4:
+      name = (java.lang.CharSequence) (value);
+      break;
+    case 5:
+      data = (java.nio.ByteBuffer) (value);
+      break;
+    default:
+      throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
 
   /**
    * Gets the value of the 'source' field.
+   * 
+   * @return source
    */
   public java.lang.CharSequence getSource() {
     return source;
@@ -131,16 +175,19 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'source' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setSource(java.lang.CharSequence value) {
     this.source = value;
     setDirty(0);
   }
-  
+
   /**
-   * Checks the dirty status of the 'source' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'source' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if the source field has not been written to the database
    */
   public boolean isSourceDirty() {
     return isDirty(0);
@@ -148,6 +195,8 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Gets the value of the 'tags' field.
+   * 
+   * @return value of tags field
    */
   public java.lang.CharSequence getTags() {
     return tags;
@@ -155,16 +204,19 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'tags' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setTags(java.lang.CharSequence value) {
     this.tags = value;
     setDirty(1);
   }
-  
+
   /**
-   * Checks the dirty status of the 'tags' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'tags' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if tags field has not been written to database
    */
   public boolean isTagsDirty() {
     return isDirty(1);
@@ -172,6 +224,8 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Gets the value of the 'datatype' field.
+   * 
+   * @return datatype field
    */
   public java.lang.CharSequence getDatatype() {
     return datatype;
@@ -179,16 +233,19 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'datatype' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setDatatype(java.lang.CharSequence value) {
     this.datatype = value;
     setDirty(2);
   }
-  
+
   /**
-   * Checks the dirty status of the 'datatype' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'datatype' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if datatype field has not been written to database
    */
   public boolean isDatatypeDirty() {
     return isDirty(2);
@@ -196,6 +253,8 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Gets the value of the 'sequenceID' field.
+   * 
+   * @return sequenceID
    */
   public java.lang.Long getSequenceID() {
     return sequenceID;
@@ -203,16 +262,19 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'sequenceID' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setSequenceID(java.lang.Long value) {
     this.sequenceID = value;
     setDirty(3);
   }
-  
+
   /**
-   * Checks the dirty status of the 'sequenceID' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'sequenceID' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if sequenceID has not been committed to database
    */
   public boolean isSequenceIDDirty() {
     return isDirty(3);
@@ -220,6 +282,8 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Gets the value of the 'name' field.
+   * 
+   * @return name
    */
   public java.lang.CharSequence getName() {
     return name;
@@ -227,16 +291,19 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'name' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setName(java.lang.CharSequence value) {
     this.name = value;
     setDirty(4);
   }
-  
+
   /**
-   * Checks the dirty status of the 'name' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'name' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if name has not been committed to database
    */
   public boolean isNameDirty() {
     return isDirty(4);
@@ -244,6 +311,8 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Gets the value of the 'data' field.
+   * 
+   * @return data field
    */
   public java.nio.ByteBuffer getData() {
     return data;
@@ -251,36 +320,59 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
   /**
    * Sets the value of the 'data' field.
-   * @param value the value to set.
+   * 
+   * @param value is the value to set.
    */
   public void setData(java.nio.ByteBuffer value) {
     this.data = value;
     setDirty(5);
   }
-  
+
   /**
-   * Checks the dirty status of the 'data' field. A field is dirty if it represents a change that has not yet been written to the database.
-   * @param value the value to set.
+   * Checks the dirty status of the 'data' field. A field is dirty if it
+   * represents a change that has not yet been written to the database.
+   * 
+   * @return true if data field has not been committed to database
    */
   public boolean isDataDirty() {
     return isDirty(5);
   }
 
-  /** Creates a new ChukwaChunk RecordBuilder */
+  /**
+   * Creates a new ChukwaChunk RecordBuilder
+   * 
+   * @return RecordBuilder
+   */
   public static org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder newBuilder() {
     return new org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder();
   }
-  
-  /** Creates a new ChukwaChunk RecordBuilder by copying an existing Builder */
-  public static org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder newBuilder(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder other) {
-    return new org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder(other);
+
+  /**
+   * Creates a new ChukwaChunk RecordBuilder by copying an existing Builder
+   * 
+   * @param other is Chukwa chunk
+   * @return RecordBuilder
+   */
+  public static org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder newBuilder(
+      org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder other) {
+    return new org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder(
+        other);
   }
-  
-  /** Creates a new ChukwaChunk RecordBuilder by copying an existing ChukwaChunk instance */
-  public static org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder newBuilder(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk other) {
-    return new org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder(other);
+
+  /**
+   * Creates a new ChukwaChunk RecordBuilder by copying an existing ChukwaChunk
+   * instance
+   * 
+   * @param other is Chukwa chunk
+   * @return RecordBuilder
+   */
+  public static org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder newBuilder(
+      org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk other) {
+    return new org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder(
+        other);
   }
-  
+
   private static java.nio.ByteBuffer deepCopyToReadOnlyBuffer(
       java.nio.ByteBuffer input) {
     java.nio.ByteBuffer copy = java.nio.ByteBuffer.allocate(input.capacity());
@@ -303,12 +395,13 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
     copy.limit(limit);
     return copy.asReadOnlyBuffer();
   }
-  
+
   /**
    * RecordBuilder for ChukwaChunk instances.
    */
-  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<ChukwaChunk>
-    implements org.apache.avro.data.RecordBuilder<ChukwaChunk> {
+  public static class Builder
+      extends org.apache.avro.specific.SpecificRecordBuilderBase<ChukwaChunk>
+      implements org.apache.avro.data.RecordBuilder<ChukwaChunk> {
 
     private java.lang.CharSequence source;
     private java.lang.CharSequence tags;
@@ -319,364 +412,518 @@ public class ChukwaChunk extends org.apache.gora.persistency.impl.PersistentBase
 
     /** Creates a new Builder */
     private Builder() {
-      super(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.SCHEMA$);
+      super(
+          org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.SCHEMA$);
     }
-    
+
     /** Creates a Builder by copying an existing Builder */
-    private Builder(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder other) {
+    private Builder(
+        org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder other) {
       super(other);
     }
-    
+
     /** Creates a Builder by copying an existing ChukwaChunk instance */
-    private Builder(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk other) {
-            super(org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.SCHEMA$);
+    private Builder(
+        org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk other) {
+      super(
+          org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.SCHEMA$);
       if (isValidValue(fields()[0], other.source)) {
-        this.source = (java.lang.CharSequence) data().deepCopy(fields()[0].schema(), other.source);
+        this.source = (java.lang.CharSequence) data()
+            .deepCopy(fields()[0].schema(), other.source);
         fieldSetFlags()[0] = true;
       }
       if (isValidValue(fields()[1], other.tags)) {
-        this.tags = (java.lang.CharSequence) data().deepCopy(fields()[1].schema(), other.tags);
+        this.tags = (java.lang.CharSequence) data()
+            .deepCopy(fields()[1].schema(), other.tags);
         fieldSetFlags()[1] = true;
       }
       if (isValidValue(fields()[2], other.datatype)) {
-        this.datatype = (java.lang.CharSequence) data().deepCopy(fields()[2].schema(), other.datatype);
+        this.datatype = (java.lang.CharSequence) data()
+            .deepCopy(fields()[2].schema(), other.datatype);
         fieldSetFlags()[2] = true;
       }
       if (isValidValue(fields()[3], other.sequenceID)) {
-        this.sequenceID = (java.lang.Long) data().deepCopy(fields()[3].schema(), other.sequenceID);
+        this.sequenceID = (java.lang.Long) data().deepCopy(fields()[3].schema(),
+            other.sequenceID);
         fieldSetFlags()[3] = true;
       }
       if (isValidValue(fields()[4], other.name)) {
-        this.name = (java.lang.CharSequence) data().deepCopy(fields()[4].schema(), other.name);
+        this.name = (java.lang.CharSequence) data()
+            .deepCopy(fields()[4].schema(), other.name);
         fieldSetFlags()[4] = true;
       }
       if (isValidValue(fields()[5], other.data)) {
-        this.data = (java.nio.ByteBuffer) data().deepCopy(fields()[5].schema(), other.data);
+        this.data = (java.nio.ByteBuffer) data().deepCopy(fields()[5].schema(),
+            other.data);
         fieldSetFlags()[5] = true;
       }
     }
 
-    /** Gets the value of the 'source' field */
+    /**
+     * Gets the value of the 'source' field
+     * 
+     * @return source field
+     */
     public java.lang.CharSequence getSource() {
       return source;
     }
-    
-    /** Sets the value of the 'source' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setSource(java.lang.CharSequence value) {
+
+    /**
+     * Sets the value of the 'source' field
+     * 
+     * @param value is a string
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setSource(
+        java.lang.CharSequence value) {
       validate(fields()[0], value);
       this.source = value;
       fieldSetFlags()[0] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'source' field has been set */
+
+    /**
+     * Checks whether the 'source' field has been set
+     * 
+     * @return true if source field has been set
+     */
     public boolean hasSource() {
       return fieldSetFlags()[0];
     }
-    
-    /** Clears the value of the 'source' field */
+
+    /**
+     * Clears the value of the 'source' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearSource() {
       source = null;
       fieldSetFlags()[0] = false;
       return this;
     }
-    
-    /** Gets the value of the 'tags' field */
+
+    /**
+     * Gets the value of the 'tags' field
+     * 
+     * @return tags field
+     */
     public java.lang.CharSequence getTags() {
       return tags;
     }
-    
-    /** Sets the value of the 'tags' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setTags(java.lang.CharSequence value) {
+
+    /**
+     * Sets the value of the 'tags' field
+     * 
+     * @param value is a string
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setTags(
+        java.lang.CharSequence value) {
       validate(fields()[1], value);
       this.tags = value;
       fieldSetFlags()[1] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'tags' field has been set */
+
+    /**
+     * Checks whether the 'tags' field has been set
+     * 
+     * @return true if tags has been set
+     */
     public boolean hasTags() {
       return fieldSetFlags()[1];
     }
-    
-    /** Clears the value of the 'tags' field */
+
+    /**
+     * Clears the value of the 'tags' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearTags() {
       tags = null;
       fieldSetFlags()[1] = false;
       return this;
     }
-    
-    /** Gets the value of the 'datatype' field */
+
+    /**
+     * Gets the value of the 'datatype' field
+     * 
+     * @return datatype field
+     */
     public java.lang.CharSequence getDatatype() {
       return datatype;
     }
-    
-    /** Sets the value of the 'datatype' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setDatatype(java.lang.CharSequence value) {
+
+    /**
+     * Sets the value of the 'datatype' field
+     * 
+     * @param value is a string
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setDatatype(
+        java.lang.CharSequence value) {
       validate(fields()[2], value);
       this.datatype = value;
       fieldSetFlags()[2] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'datatype' field has been set */
+
+    /**
+     * Checks whether the 'datatype' field has been set
+     * 
+     * @return true if datatype field has been set
+     */
     public boolean hasDatatype() {
       return fieldSetFlags()[2];
     }
-    
-    /** Clears the value of the 'datatype' field */
+
+    /**
+     * Clears the value of the 'datatype' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearDatatype() {
       datatype = null;
       fieldSetFlags()[2] = false;
       return this;
     }
-    
-    /** Gets the value of the 'sequenceID' field */
+
+    /**
+     * Gets the value of the 'sequenceID' field
+     * 
+     * @return sequenceID
+     */
     public java.lang.Long getSequenceID() {
       return sequenceID;
     }
-    
-    /** Sets the value of the 'sequenceID' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setSequenceID(java.lang.Long value) {
+
+    /**
+     * Sets the value of the 'sequenceID' field
+     * 
+     * @param value is a long
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setSequenceID(
+        java.lang.Long value) {
       validate(fields()[3], value);
       this.sequenceID = value;
       fieldSetFlags()[3] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'sequenceID' field has been set */
+
+    /**
+     * Checks whether the 'sequenceID' field has been set
+     * 
+     * @return true if sequenceID has been set
+     */
     public boolean hasSequenceID() {
       return fieldSetFlags()[3];
     }
-    
-    /** Clears the value of the 'sequenceID' field */
+
+    /**
+     * Clears the value of the 'sequenceID' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearSequenceID() {
       sequenceID = null;
       fieldSetFlags()[3] = false;
       return this;
     }
-    
-    /** Gets the value of the 'name' field */
+
+    /**
+     * Gets the value of the 'name' field
+     * 
+     * @return name
+     */
     public java.lang.CharSequence getName() {
       return name;
     }
-    
-    /** Sets the value of the 'name' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setName(java.lang.CharSequence value) {
+
+    /**
+     * Sets the value of the 'name' field
+     * 
+     * @param value is a string
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setName(
+        java.lang.CharSequence value) {
       validate(fields()[4], value);
       this.name = value;
       fieldSetFlags()[4] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'name' field has been set */
+
+    /**
+     * Checks whether the 'name' field has been set
+     * 
+     * @return true if name field has been set
+     */
     public boolean hasName() {
       return fieldSetFlags()[4];
     }
-    
-    /** Clears the value of the 'name' field */
+
+    /**
+     * Clears the value of the 'name' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearName() {
       name = null;
       fieldSetFlags()[4] = false;
       return this;
     }
-    
-    /** Gets the value of the 'data' field */
+
+    /**
+     * Gets the value of the 'data' field
+     * 
+     * @return data
+     */
     public java.nio.ByteBuffer getData() {
       return data;
     }
-    
-    /** Sets the value of the 'data' field */
-    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setData(java.nio.ByteBuffer value) {
+
+    /**
+     * Sets the value of the 'data' field
+     * 
+     * @param value is a byte buffer
+     * @return RecordBuilder
+     */
+    public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder setData(
+        java.nio.ByteBuffer value) {
       validate(fields()[5], value);
       this.data = value;
       fieldSetFlags()[5] = true;
-      return this; 
+      return this;
     }
-    
-    /** Checks whether the 'data' field has been set */
+
+    /**
+     * Checks whether the 'data' field has been set
+     * 
+     * @return true if data field has been set
+     */
     public boolean hasData() {
       return fieldSetFlags()[5];
     }
-    
-    /** Clears the value of the 'data' field */
+
+    /**
+     * Clears the value of the 'data' field
+     * 
+     * @return RecordBuilder
+     */
     public org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk.Builder clearData() {
       data = null;
       fieldSetFlags()[5] = false;
       return this;
     }
-    
+
     @Override
     public ChukwaChunk build() {
       try {
         ChukwaChunk record = new ChukwaChunk();
-        record.source = fieldSetFlags()[0] ? this.source : (java.lang.CharSequence) defaultValue(fields()[0]);
-        record.tags = fieldSetFlags()[1] ? this.tags : (java.lang.CharSequence) defaultValue(fields()[1]);
-        record.datatype = fieldSetFlags()[2] ? this.datatype : (java.lang.CharSequence) defaultValue(fields()[2]);
-        record.sequenceID = fieldSetFlags()[3] ? this.sequenceID : (java.lang.Long) defaultValue(fields()[3]);
-        record.name = fieldSetFlags()[4] ? this.name : (java.lang.CharSequence) defaultValue(fields()[4]);
-        record.data = fieldSetFlags()[5] ? this.data : (java.nio.ByteBuffer) defaultValue(fields()[5]);
+        record.source = fieldSetFlags()[0] ? this.source
+            : (java.lang.CharSequence) defaultValue(fields()[0]);
+        record.tags = fieldSetFlags()[1] ? this.tags
+            : (java.lang.CharSequence) defaultValue(fields()[1]);
+        record.datatype = fieldSetFlags()[2] ? this.datatype
+            : (java.lang.CharSequence) defaultValue(fields()[2]);
+        record.sequenceID = fieldSetFlags()[3] ? this.sequenceID
+            : (java.lang.Long) defaultValue(fields()[3]);
+        record.name = fieldSetFlags()[4] ? this.name
+            : (java.lang.CharSequence) defaultValue(fields()[4]);
+        record.data = fieldSetFlags()[5] ? this.data
+            : (java.nio.ByteBuffer) defaultValue(fields()[5]);
         return record;
       } catch (Exception e) {
         throw new org.apache.avro.AvroRuntimeException(e);
       }
     }
   }
-  
-  public ChukwaChunk.Tombstone getTombstone(){
-  	return TOMBSTONE;
+
+  public ChukwaChunk.Tombstone getTombstone() {
+    return TOMBSTONE;
   }
 
-  public ChukwaChunk newInstance(){
+  public ChukwaChunk newInstance() {
     return newBuilder().build();
   }
 
   private static final Tombstone TOMBSTONE = new Tombstone();
-  
-  public static final class Tombstone extends ChukwaChunk implements org.apache.gora.persistency.Tombstone {
-  
-      private Tombstone() { }
-  
-	  		  /**
-	   * Gets the value of the 'source' field.
-		   */
-	  public java.lang.CharSequence getSource() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'source' field.
-		   * @param value the value to set.
-	   */
-	  public void setSource(java.lang.CharSequence value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'source' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isSourceDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-				  /**
-	   * Gets the value of the 'tags' field.
-		   */
-	  public java.lang.CharSequence getTags() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'tags' field.
-		   * @param value the value to set.
-	   */
-	  public void setTags(java.lang.CharSequence value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'tags' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isTagsDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-				  /**
-	   * Gets the value of the 'datatype' field.
-		   */
-	  public java.lang.CharSequence getDatatype() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'datatype' field.
-		   * @param value the value to set.
-	   */
-	  public void setDatatype(java.lang.CharSequence value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'datatype' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isDatatypeDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-				  /**
-	   * Gets the value of the 'sequenceID' field.
-		   */
-	  public java.lang.Long getSequenceID() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'sequenceID' field.
-		   * @param value the value to set.
-	   */
-	  public void setSequenceID(java.lang.Long value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'sequenceID' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isSequenceIDDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-				  /**
-	   * Gets the value of the 'name' field.
-		   */
-	  public java.lang.CharSequence getName() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'name' field.
-		   * @param value the value to set.
-	   */
-	  public void setName(java.lang.CharSequence value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'name' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isNameDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-				  /**
-	   * Gets the value of the 'data' field.
-		   */
-	  public java.nio.ByteBuffer getData() {
-	    throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
-	  }
-	
-	  /**
-	   * Sets the value of the 'data' field.
-		   * @param value the value to set.
-	   */
-	  public void setData(java.nio.ByteBuffer value) {
-	    throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
-	  }
-	  
-	  /**
-	   * Checks the dirty status of the 'data' field. A field is dirty if it represents a change that has not yet been written to the database.
-		   * @param value the value to set.
-	   */
-	  public boolean isDataDirty() {
-	    throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
-	  }
-	
-		  
+
+  public static final class Tombstone extends ChukwaChunk
+      implements org.apache.gora.persistency.Tombstone {
+
+    private Tombstone() {
+    }
+
+    /**
+     * Gets the value of the 'source' field.
+     */
+    public java.lang.CharSequence getSource() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'source' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setSource(java.lang.CharSequence value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'source' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isSourceDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
+    /**
+     * Gets the value of the 'tags' field.
+     */
+    public java.lang.CharSequence getTags() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'tags' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setTags(java.lang.CharSequence value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'tags' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isTagsDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
+    /**
+     * Gets the value of the 'datatype' field.
+     */
+    public java.lang.CharSequence getDatatype() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'datatype' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setDatatype(java.lang.CharSequence value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'datatype' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isDatatypeDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
+    /**
+     * Gets the value of the 'sequenceID' field.
+     */
+    public java.lang.Long getSequenceID() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'sequenceID' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setSequenceID(java.lang.Long value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'sequenceID' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isSequenceIDDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
+    /**
+     * Gets the value of the 'name' field.
+     */
+    public java.lang.CharSequence getName() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'name' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setName(java.lang.CharSequence value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'name' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isNameDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
+    /**
+     * Gets the value of the 'data' field.
+     */
+    public java.nio.ByteBuffer getData() {
+      throw new java.lang.UnsupportedOperationException(
+          "Get is not supported on tombstones");
+    }
+
+    /**
+     * Sets the value of the 'data' field.
+     * 
+     * @param value
+     *          the value to set.
+     */
+    public void setData(java.nio.ByteBuffer value) {
+      throw new java.lang.UnsupportedOperationException(
+          "Set is not supported on tombstones");
+    }
+
+    /**
+     * Checks the dirty status of the 'data' field. A field is dirty if it
+     * represents a change that has not yet been written to the database.
+     */
+    public boolean isDataDirty() {
+      throw new java.lang.UnsupportedOperationException(
+          "IsDirty is not supported on tombstones");
+    }
+
   }
-  
-}
 
+}
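
For orientation, here is a minimal sketch of how the generated Builder above can be used to assemble a ChukwaChunk; the field values are illustrative and the sketch is not part of this patch.

import java.nio.ByteBuffer;
import org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk;

public class ChukwaChunkExample {
  public static void main(String[] args) {
    // Every value below is made up; the setters mirror the Avro schema fields.
    ChukwaChunk chunk = ChukwaChunk.newBuilder()
        .setSource("host1.example.com")                       // emitting host
        .setTags("cluster=\"demo\"")                          // cluster tag from chukwa-agent-conf.xml
        .setDatatype("SysLog")                                // expected payload format
        .setSequenceID(1L)
        .setName("adaptor_1")
        .setData(ByteBuffer.wrap("sample log line".getBytes()))
        .build();
    System.out.println(chunk.getDatatype() + " chunk from " + chunk.getSource());
  }
}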

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/GoraWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/GoraWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/GoraWriter.java
index bdf66f2..a3771f2 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/GoraWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/gora/GoraWriter.java
@@ -47,6 +47,7 @@ public class GoraWriter extends PipelineableWriter {
 
   /**
    * Default constructor for this class.
+   * @throws WriterException if error writing
    */
   public GoraWriter() throws WriterException {
     log.debug("Initializing configuration for GoraWriter pipeline...");
@@ -57,8 +58,7 @@ public class GoraWriter extends PipelineableWriter {
    * {@link org.apache.gora.store.DataStore} objects are created from a factory. It is necessary to 
    * provide the key and value class. The datastore class parameters is optional, 
    * and if not specified it will be read from the <code>gora.properties</code> file.
-   * @throws WriterException
-   * @throws GoraException 
+   * @throws WriterException if error occurs
    * @see org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter#init(org.apache.hadoop.conf.Configuration)
    */
   @Override
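
A rough sketch of the factory-based DataStore creation that the Javadoc above describes; the String key type, the plain Hadoop Configuration, and the key value are assumptions for illustration rather than what GoraWriter actually does.

import org.apache.gora.store.DataStore;
import org.apache.gora.store.DataStoreFactory;
import org.apache.hadoop.chukwa.datacollection.writer.gora.ChukwaChunk;
import org.apache.hadoop.conf.Configuration;

public class GoraStoreSketch {
  public static void main(String[] args) throws Exception {
    // Key and value classes are given explicitly; the concrete datastore class
    // is resolved from gora.properties when it is not passed to the factory.
    Configuration conf = new Configuration();
    DataStore<String, ChukwaChunk> store =
        DataStoreFactory.getDataStore(String.class, ChukwaChunk.class, conf);
    store.put("chunk-0001", ChukwaChunk.newBuilder().build()); // illustrative key, empty chunk
    store.flush();
    store.close();
  }
}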

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java b/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
index 3b8b946..a49aaa0 100644
--- a/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
+++ b/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
@@ -35,8 +35,10 @@ public abstract class DataLoaderFactory {
   }
 
   /**
-   * @param args
-   * @throws IOException
+   * @param conf Chukwa Configuration
+   * @param fs Hadoop File System
+   * @param src List of files to load
+   * @throws IOException if error loading files
    */
   public void load(ChukwaConfiguration conf, FileSystem fs, FileStatus[] src) throws IOException {
     this.source=src.clone();

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java b/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
index 5538a40..baf833d 100644
--- a/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
+++ b/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
@@ -72,7 +72,10 @@ public class MetricDataLoader implements Callable {
     fs = FileSystem.get(conf);
   }
 
-  /** Creates a new instance of DBWriter */
+  /** Creates a new instance of DBWriter 
+   * @param conf Chukwa Configuration
+   * @param fs Hadoop File System
+   * @param fileName Chukwa Sequence file */
   public MetricDataLoader(ChukwaConfiguration conf, FileSystem fs, String fileName) {
     source = new Path(fileName);
     this.conf = conf;
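
A hedged usage sketch for the constructor documented above; the sequence-file path is hypothetical, the ChukwaConfiguration import path is assumed, and the loader is run through an executor because MetricDataLoader implements Callable.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.dataloader.MetricDataLoader;
import org.apache.hadoop.fs.FileSystem;

public class MetricDataLoaderSketch {
  public static void main(String[] args) throws Exception {
    ChukwaConfiguration conf = new ChukwaConfiguration();
    FileSystem fs = FileSystem.get(conf);
    // Hypothetical path to a Chukwa sequence file.
    MetricDataLoader loader =
        new MetricDataLoader(conf, fs, "/chukwa/repos/demo/SystemMetrics.evt");
    ExecutorService pool = Executors.newSingleThreadExecutor();
    Future<?> done = pool.submit(loader);
    done.get();
    pool.shutdown();
  }
}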

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java b/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
index bdd765d..889e980 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
@@ -111,12 +111,12 @@ public class ChukwaHBaseStore {
    * Scan chukwa table for a particular metric group and metric name based on
    * time ranges.
    * 
-   * @param metricGroup
-   * @param metric
-   * @param source
-   * @param startTime
-   * @param endTime
-   * @return
+   * @param metricGroup metric group name
+   * @param metric metric name
+   * @param source source of the metric
+   * @param startTime start time
+   * @param endTime end time
+   * @return Series object
    */
   public static Series getSeries(String metricGroup, String metric,
       String source, long startTime, long endTime) {
@@ -128,11 +128,11 @@ public class ChukwaHBaseStore {
   /**
    * Scan chukwa table for a full metric name based on time ranges.
    * 
-   * @param metric
-   * @param source
-   * @param startTime
-   * @param endTime
-   * @return
+   * @param metric metric group name and metric name combined
+   * @param source source of the metric
+   * @param startTime start time
+   * @param endTime end time
+   * @return Series object
    */
   public static synchronized Series getSeries(String metric, String source, long startTime,
       long endTime) {
@@ -341,9 +341,9 @@ public class ChukwaHBaseStore {
    * Scan chukwa table and find cluster tag from annotation column family from a
    * range of entries.
    * 
-   * @param startTime
-   * @param endTime
-   * @return
+   * @param startTime start time in epoch
+   * @param endTime end time in epoch
+   * @return Set of cluster names
    */
   public static Set<String> getClusterNames(long startTime, long endTime) {
     Set<String> clusters = new HashSet<String>();
@@ -374,8 +374,8 @@ public class ChukwaHBaseStore {
   /**
    * Get a chart from HBase by ID.
    * 
-   * @param id
-   * @return
+   * @param id Chart ID
+   * @return Chart object
    */
   public static Chart getChart(String id) {
     Chart chart = null;
@@ -400,8 +400,8 @@ public class ChukwaHBaseStore {
   /**
    * Update a chart in HBase by ID.
    * 
-   * @param id
-   * @param chart
+   * @param id Chart ID
+   * @param chart Chart Object
    */
   public static void putChart(String id, Chart chart) {
     try {
@@ -422,7 +422,13 @@ public class ChukwaHBaseStore {
 
   /**
    * Create a chart in HBase by specifying parameters.
-   * @throws URISyntaxException 
+   * @param id is unique chart identifier
+   * @param title is searchable name of the chart
+   * @param metrics is list of metric names to render chart
+   * @param source is data source name
+   * @param yunitType is y axis unit type
+   * @return Chart ID
+   * @throws URISyntaxException if metrics name can not compose valid URL syntax
    */
   public static synchronized String createChart(String id,
       String title, String[] metrics, String source, String yunitType) throws URISyntaxException {
@@ -446,7 +452,14 @@ public class ChukwaHBaseStore {
 
   /**
    * Create a chart in HBase by specifying parameters.
-   * @throws URISyntaxException 
+   * @param id is unique chart identifier
+   * @param title is searchable name of the chart
+   * @param metrics is list of metric names to render ring chart
+   * @param source is data source name
+   * @param suffixLabel is text label to append to metric values
+   * @param direction sets the threshold to have either upper limit or lower limit
+   * @return Chart ID
+   * @throws URISyntaxException if metrics name can not compose valid URL syntax
    */
   public static synchronized String createCircle(String id,
       String title, String[] metrics, String source, String suffixLabel, String direction) throws URISyntaxException {
@@ -469,15 +482,15 @@ public class ChukwaHBaseStore {
 
   /**
    * Create a tile in HBase by specifying parameters.
-   * @param id
-   * @param title
-   * @param bannerText
-   * @param suffixLabel
-   * @param metrics
-   * @param source
-   * @param icon
-   * @return
-   * @throws URISyntaxException
+   * @param id is unique tile identifier
+   * @param title is searchable name of the tile widget
+   * @param bannerText is description of the tile widget
+   * @param suffixLabel is text label to append to metric values
+   * @param metrics is list of metric names to render tile widget
+   * @param source is data source name
+   * @param icon is emoji symbol to render beside tile widget
+   * @return Widget ID
+   * @throws URISyntaxException if metrics name can not compose valid URL syntax
    */
   public static synchronized String createTile(String id, String title, 
       String bannerText, String suffixLabel, String[] metrics, String source, 
@@ -500,9 +513,8 @@ public class ChukwaHBaseStore {
   /**
    * Create a chart in HBase.
    * 
-   * @param chart
+   * @param chart is a chukwa Chart object
    * @return id of newly created chart
-   * @throws IOException
    */
   public static synchronized String createChart(Chart chart) {
     String id = chart.getId();
@@ -538,10 +550,10 @@ public class ChukwaHBaseStore {
   /**
    * Return data for multiple series of metrics stored in HBase.
    * 
-   * @param series
-   * @param startTime
-   * @param endTime
-   * @return
+   * @param series is SeriesMetaData object
+   * @param startTime sets the start time of metrics
+   * @param endTime sets the end time of metrics
+   * @return A list of Series meta data
    */
   public static synchronized ArrayList<org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData> getChartSeries(ArrayList<org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData> series, long startTime, long endTime) {
     ArrayList<org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData> list = new ArrayList<org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData>();
@@ -652,9 +664,9 @@ public class ChukwaHBaseStore {
   /**
    * List widgets stored in HBase.
    * 
-   * @param limit
-   * @param offset
-   * @return
+   * @param limit sets the number of widgets to return
+   * @param offset sets the starting point to return widgets
+   * @return List of Widgets
    */
   public static synchronized List<Widget> listWidget(int limit, int offset) {
     ArrayList<Widget> list = new ArrayList<Widget>();
@@ -738,8 +750,8 @@ public class ChukwaHBaseStore {
   /**
    * Find widget by title prefix in HBase.
    * 
-   * @param query - Prefix query of widget title.
-   * @return
+   * @param query is prefix query of widget title.
+   * @return List of Widgets
    */
   public static synchronized List<Widget> searchWidget(String query) {
     ArrayList<Widget> list = new ArrayList<Widget>();
@@ -773,8 +785,8 @@ public class ChukwaHBaseStore {
   /**
    * View a widget information in HBase.
    * 
-   * @param title - Title of the widget.
-   * @return
+   * @param title is title of the widget.
+   * @return List of Widgets
    */
   public static synchronized Widget viewWidget(String title) {
     Widget w = null;
@@ -798,7 +810,8 @@ public class ChukwaHBaseStore {
   /**
    * Create a widget in HBase.
    * 
-   * @param widget
+   * @param widget is chukwa Widget object
+   * @return true if widget is created
    */
   public static synchronized boolean createWidget(Widget widget) {
     boolean created = false;
@@ -830,9 +843,9 @@ public class ChukwaHBaseStore {
   /**
    * Update a widget in HBase.
    * 
-   * @param title
-   * @param widget
-   * @throws IOException 
+   * @param title is searchable title in a widget
+   * @param widget is Chukwa Widget object
+   * @return true if widget has been updated
    */
   public static synchronized boolean updateWidget(String title, Widget widget) {
     boolean result = false;
@@ -861,9 +874,8 @@ public class ChukwaHBaseStore {
   /**
    * Delete a widget in HBase.
    * 
-   * @param title
-   * @param widget
-   * @throws IOException 
+   * @param title is searchable title in a widget
+   * @return true if widget has been deleted
    */
   public static synchronized boolean deleteWidget(String title) {
     boolean result = false;
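
A sketch of calling the chart and series APIs whose parameters are documented above; the metric name, the source, and the Series import location are assumptions for illustration, not part of the patch.

import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Series;

public class ChukwaHBaseStoreSketch {
  public static void main(String[] args) throws Exception {
    // Create a chart for a single (made-up) metric.
    String chartId = ChukwaHBaseStore.createChart("cpu-usage", "CPU Usage",
        new String[] { "SystemMetrics.cpu.combined" }, "host1.example.com", "percent");
    System.out.println("created chart " + chartId);

    // Fetch one hour of data points for the same metric and source.
    long end = System.currentTimeMillis();
    long start = end - 60L * 60L * 1000L;
    Series series = ChukwaHBaseStore.getSeries("SystemMetrics.cpu.combined",
        "host1.example.com", start, end);
    System.out.println(series);
  }
}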

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datatrigger/HttpTriggerAction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datatrigger/HttpTriggerAction.java b/src/main/java/org/apache/hadoop/chukwa/datatrigger/HttpTriggerAction.java
index db71668..4e94167 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datatrigger/HttpTriggerAction.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datatrigger/HttpTriggerAction.java
@@ -78,11 +78,11 @@ public class HttpTriggerAction implements TriggerAction {
    * Iterates over each URL found, fetched other settings and fires and HTTP
    * request.
    *
-   * @param conf
-   * @param fs
-   * @param src
-   * @param triggerEvent
-   * @throws IOException
+   * @param conf is Chukwa configuration
+   * @param fs is HDFS File System
+   * @param src is list of sources to look for data
+   * @param triggerEvent is type of processing to happen
+   * @throws IOException if error in process triggers
    */
   public void execute(Configuration conf, FileSystem fs,
                       FileStatus[] src, TriggerEvent triggerEvent) throws IOException {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/archive/SinkArchiver.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/archive/SinkArchiver.java b/src/main/java/org/apache/hadoop/chukwa/extraction/archive/SinkArchiver.java
index 6140dca..bb050af 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/archive/SinkArchiver.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/archive/SinkArchiver.java
@@ -148,7 +148,7 @@ public class SinkArchiver implements CHUKWA_CONSTANT {
    * @param fs the filesystem in question
    * @param src a file or directory to merge into dest
    * @param dest a directory to merge into
-   * @throws IOException
+   * @throws IOException if error in promote and merge
    */
   public void promoteAndMerge(FileSystem fs, Path src, Path dest) 
   throws IOException {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
index ebdbca5..7af43b4 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
@@ -202,8 +202,8 @@ public class DailyChukwaRecordRolling extends Configured implements Tool {
   }
 
   /**
-   * @param args
-   * @throws Exception
+   * @param args are the command line parameters
+   * @throws Exception if unable to process data
    */
   public static void main(String[] args) throws Exception {
     

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
index 9fcb65b..a088546 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
@@ -98,7 +98,7 @@ public class DemuxManager implements CHUKWA_CONSTANT {
 
   /**
    * Start the Demux Manager daemon
-   * @throws Exception
+   * @throws Exception if error in processing data
    */
   public void start() throws Exception {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
index e272ea8..9b87cfd 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
@@ -152,8 +152,8 @@ public class HourlyChukwaRecordRolling extends Configured implements Tool {
   }
 
   /**
-   * @param args
-   * @throws Exception
+   * @param args are the command line parameters
+   * @throws Exception if error in processing data
    */
   public static void main(String[] args) throws Exception {
     

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveOrMergeRecordFile.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveOrMergeRecordFile.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveOrMergeRecordFile.java
index 7d83ec8..1d09096 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveOrMergeRecordFile.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveOrMergeRecordFile.java
@@ -140,8 +140,8 @@ public class MoveOrMergeRecordFile extends Configured implements Tool {
   }
 
   /**
-   * @param args
-   * @throws Exception
+   * @param args are the command line parameters
+   * @throws Exception if unable to process data
    */
   public static void main(String[] args) throws Exception {
     conf = new ChukwaConfiguration();

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveToRepository.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveToRepository.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveToRepository.java
index 25d2340..899baa7 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveToRepository.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/MoveToRepository.java
@@ -292,8 +292,8 @@ public class MoveToRepository {
   }
 
   /**
-   * @param args
-   * @throws Exception
+   * @param args are the command line parameters
+   * @throws Exception if error in processing data
    */
   public static void main(String[] args) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ProcessorFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ProcessorFactory.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ProcessorFactory.java
index 4f10532..04a7e63 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ProcessorFactory.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ProcessorFactory.java
@@ -65,6 +65,8 @@ public class ProcessorFactory {
 
   /**
    * Register a specific parser for a {@link ChunkProcessor} implementation.
+   * @param recordType is the data type assigned during adaptor creation
+   * @param processor is the parser class used to process the data
    */
   public static synchronized void register(String recordType,
       ChunkProcessor processor) {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java
index 19b3f45..f695368 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java
@@ -44,6 +44,8 @@ public class ReduceProcessorFactory {
 
   /**
    * Register a specific parser for a {@link ReduceProcessor} implementation.
+   * @param reduceType is the data type assigned by the mapper
+   * @param processor is the parser class used by the reducer
    */
   public static synchronized void register(String reduceType,
                                            ReduceProcessor processor) {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java b/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java
index 291e509..6d27cda 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java
@@ -43,8 +43,10 @@ public class RecordUtil {
     return "undefined";
   }
   /**
-   * Uses a precompiled pattern, so theoretically faster than
+   * Uses a compiled pattern, so theoretically faster than
    * Chunk.getTag().
+   * @param chunk - a chunk of binary data
+   * @return the cluster name
    * 
    */
   public static String getClusterName(Chunk chunk) {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java b/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java
index 4f5f289..eecc9ad 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java
@@ -58,11 +58,11 @@ public abstract class AbstractProcessor {
    * Generic metric function to add a metric to HBase with full primary key and
    * source computed.
    * 
-   * @param time
-   * @param metric
-   * @param source
-   * @param value
-   * @param output
+   * @param time is the epoch timestamp
+   * @param metric is the metric name
+   * @param source is the data source name
+   * @param value is the metric value in bytes
+   * @param output is an array list of HBase Put operations
    */
   public void addRecord(long time, String metric, String source, byte[] value,
       ArrayList<Put> output) {
@@ -86,10 +86,8 @@ public abstract class AbstractProcessor {
    * assumes "time" and "source" have been defined and will construct primaryKey
    * only, without recompute time and source md5.
    * 
-   * @param time
-   * @param primaryKey
-   * @param value
-   * @param output
+   * @param metric is the metric name
+   * @param value is the metric value in bytes
    */
   public void addRecord(String metric, byte[] value) {
     String primaryKey = new StringBuilder(primaryKeyHelper).append(".")
@@ -105,10 +103,10 @@ public abstract class AbstractProcessor {
   /**
    * Process a chunk to store in HBase.
    * 
-   * @param chunk
-   * @param output
-   * @param reporter
-   * @throws Throwable
+   * @param chunk is a Chukwa chunk
+   * @param output is an array list of HBase Put operations
+   * @param reporter is a reporter to track progress
+   * @throws Throwable if there is a problem parsing the data
    */
   public void process(Chunk chunk, ArrayList<Put> output, Reporter reporter)
       throws Throwable {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java b/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java
index 156d9d5..3c134ea 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java
@@ -36,10 +36,10 @@ public class ChukwaMetricsProcessor extends HadoopMetricsProcessor {
   /**
    * Process cluster name and store in HBase.
    * 
-   * @param chunk
-   * @param output
-   * @param reporter
-   * @throws Throwable
+   * @param chunk is a Chukwa data chunk
+   * @param output is a list of HBase Put operations
+   * @param reporter is the progress reporter
+   * @throws Throwable if unable to send data
    */
   @Override
   public void process(Chunk chunk, ArrayList<Put> output, Reporter reporter)

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java
index 41ef551..614a161 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java
@@ -60,7 +60,7 @@ public class ChartController {
    * Render chart using flot.js
    * 
    * @param id Reference ID of Chart stored in HBase chukwa_meta table.
-   * @return html chart widget
+   * @return chart widget
    */
   @GET
   @Path("draw/{id}")
@@ -88,6 +88,8 @@ public class ChartController {
 
   /**
    * Describe chart meta data
+   * @param id Chart ID
+   * @return chart meta data
    */
   @GET
   @Path("describe/{id}")
@@ -102,8 +104,8 @@ public class ChartController {
   /**
    * Create a new chart meta data
    * 
-   * @param chart
-   * @return
+   * @param buffer holds the incoming JSON of a Chart object
+   * @return web response code
    */
   @POST
   @Path("save")
@@ -121,8 +123,9 @@ public class ChartController {
   /**
    * Save chart meta data
    * 
-   * @param chart
-   * @return
+   * @param id is the unique identifier of the Chart object
+   * @param buffer holds the incoming JSON of a Chart object
+   * @return web response code
    */
   @PUT
   @Path("save/{id}")
@@ -137,6 +140,8 @@ public class ChartController {
 
   /**
    * Preview a chart
+   * @param buffer holds the incoming JSON of a Chart object
+   * @return a segment of chart HTML output
    */
   @PUT
   @Path("preview")

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java
index 5127b1c..e7dd2a4 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java
@@ -59,9 +59,7 @@ public class CirclesController {
   /**
    * Render circle using jquery circliful.js
    * 
-   * @param title Title of the tile.
-   * @param metric Metric name to lookup in hbase.
-   * @param source Metric source.
+   * @param id Title of the tile.
    * @param invert Toggle to display warning, error color by upper bound or lower bound.
    * @return html circle widget.
    */

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java
index 1441253..be4afb5 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java
@@ -44,7 +44,8 @@ public class SessionController {
 
   /**
    * Utility to get session attributes
-   * @param id
+   * @param request is the HTTP request object
+   * @param id is the session attribute key
    * @return session attribute
    */
   @GET

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
index 72b6dfd..fdc49db 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
@@ -64,7 +64,7 @@ import org.apache.log4j.spi.LoggingEvent;
     option. This pattern should follow the {@link SimpleDateFormat}
     conventions. In particular, you <em>must</em> escape literal text
     within a pair of single quotes. A formatted version of the date
-    pattern is used as the suffix for the rolled file name.
+    pattern is used as the suffix for the rolled file name.</p>
 
     <p>For example, if the <b>File</b> option is set to
     <code>/foo/bar.log</code> and the <b>DatePattern</b> set to
@@ -72,16 +72,16 @@ import org.apache.log4j.spi.LoggingEvent;
     file <code>/foo/bar.log</code> will be copied to
     <code>/foo/bar.log.2001-02-16</code> and logging for 2001-02-17
     will continue in <code>/foo/bar.log</code> until it rolls over
-    the next day.
+    the next day.</p>
 
    <p>It is possible to specify monthly, weekly, half-daily, daily,
-    hourly, or minutely rollover schedules.
+    hourly, or minutely rollover schedules.</p>
 
-    <p><table border="1" cellpadding="2">
+    <table border="1" cellpadding="2" summary="">
     <tr>
     <th>DatePattern</th>
     <th>Rollover schedule</th>
-    <th>Example</th>
+    <th>Example</th></tr>
 
     <tr>
     <td><code>'.'yyyy-MM</code>
@@ -90,7 +90,7 @@ import org.apache.log4j.spi.LoggingEvent;
     <td>At midnight of May 31st, 2002 <code>/foo/bar.log</code> will be
     copied to <code>/foo/bar.log.2002-05</code>. Logging for the month
     of June will be output to <code>/foo/bar.log</code> until it is
-    also rolled over the next month.
+    also rolled over the next month.</td></tr>
 
     <tr>
     <td><code>'.'yyyy-ww</code>
@@ -102,7 +102,7 @@ import org.apache.log4j.spi.LoggingEvent;
     midnight, June 9th 2002, the file <i>/foo/bar.log</i> will be
     copied to <i>/foo/bar.log.2002-23</i>.  Logging for the 24th week
     of 2002 will be output to <code>/foo/bar.log</code> until it is
-    rolled over the next week.
+    rolled over the next week.</td></tr>
 
     <tr>
     <td><code>'.'yyyy-MM-dd</code>
@@ -112,7 +112,7 @@ import org.apache.log4j.spi.LoggingEvent;
     <td>At midnight, on March 8th, 2002, <code>/foo/bar.log</code> will
     be copied to <code>/foo/bar.log.2002-03-08</code>. Logging for the
     9th day of March will be output to <code>/foo/bar.log</code> until
-    it is rolled over the next day.
+    it is rolled over the next day.</td></tr>
 
     <tr>
     <td><code>'.'yyyy-MM-dd-a</code>
@@ -122,7 +122,7 @@ import org.apache.log4j.spi.LoggingEvent;
     <td>At noon, on March 9th, 2002, <code>/foo/bar.log</code> will be
     copied to <code>/foo/bar.log.2002-03-09-AM</code>. Logging for the
     afternoon of the 9th will be output to <code>/foo/bar.log</code>
-    until it is rolled over at midnight.
+    until it is rolled over at midnight.</td></tr>
 
     <tr>
     <td><code>'.'yyyy-MM-dd-HH</code>
@@ -133,11 +133,11 @@ import org.apache.log4j.spi.LoggingEvent;
     <code>/foo/bar.log</code> will be copied to
     <code>/foo/bar.log.2002-03-09-10</code>. Logging for the 11th hour
     of the 9th of March will be output to <code>/foo/bar.log</code>
-    until it is rolled over at the beginning of the next hour.
+    until it is rolled over at the beginning of the next hour.</td></tr>
 
 
     <tr>
-    <td><code>'.'yyyy-MM-dd-HH-mm</code>
+    <td><code>'.'yyyy-MM-dd-HH-mm</code></td>
 
     <td>Rollover at the beginning of every minute.</td>
 
@@ -145,14 +145,13 @@ import org.apache.log4j.spi.LoggingEvent;
     <code>/foo/bar.log</code> will be copied to
     <code>/foo/bar.log.2001-03-09-10-22</code>. Logging for the minute
     of 11:23 (9th of March) will be output to
-    <code>/foo/bar.log</code> until it is rolled over the next minute.
-
+    <code>/foo/bar.log</code> until it is rolled over the next minute.</td></tr>
     </table>
 
    <p>Do not use the colon ":" character anywhere in the
    <b>DatePattern</b> option. The text before the colon is interpreted
    as the protocol specification of a URL which is probably not what
-    you want. */
+    you want.</p> */
 
 public class ChukwaDailyRollingFileAppender extends FileAppender {
 
@@ -230,6 +229,7 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
 
   /**
    * The default constructor does nothing.
+   * @throws IOException if an error occurs during constructor initialization
    */
   public ChukwaDailyRollingFileAppender() throws IOException {
     super();
@@ -239,6 +239,10 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
      Instantiate a <code>DailyRollingFileAppender</code> and open the
      file designated by <code>filename</code>. The opened filename will
      become the output destination for this appender.
+   * @param layout is the logging layout
+   * @param filename is the file to write logs to
+   * @param datePattern is the date pattern used as the rolled file suffix
+   * @throws IOException if the designated file cannot be opened
 
    */
   public ChukwaDailyRollingFileAppender(Layout layout, String filename,
@@ -253,12 +257,15 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
   /**
    * The <b>DatePattern</b> takes a string in the same format as expected by
    * {@link SimpleDateFormat}. This options determines the rollover schedule.
+   * @param pattern is the date formatting pattern
    */
   public void setDatePattern(String pattern) {
     datePattern = pattern;
   }
 
-  /** Returns the value of the <b>DatePattern</b> option. */
+  /** Returns the value of the <b>DatePattern</b> option.
+   * @return the date pattern
+   */
   public String getDatePattern() {
     return datePattern;
   }

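As a side note on the constructor and DatePattern setter documented above, the following is a
minimal sketch of attaching the appender programmatically. The layout pattern and the log file
path are illustrative assumptions, not part of this commit; the appender is normally configured
through log4j properties instead.

    import java.io.IOException;
    import org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;

    public class DailyAppenderSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical layout and file path; the '.'yyyy-MM-dd pattern rolls the
        // file once a day, as described in the DatePattern table above.
        ChukwaDailyRollingFileAppender appender = new ChukwaDailyRollingFileAppender(
            new PatternLayout("%d{ISO8601} %-5p %c - %m%n"), "/tmp/bar.log", "'.'yyyy-MM-dd");
        Logger log = Logger.getLogger(DailyAppenderSketch.class);
        log.addAppender(appender);
        log.info("rolling file appender attached");
      }
    }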
http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java b/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
index ca97be1..42fa9e2 100644
--- a/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
+++ b/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
@@ -50,11 +50,7 @@ public class ClientTrace {
    * Get a list of the most recent client trace activities.
    * The extracted elements are:
    * 
-   * date   - Timestamp of the activity happened.
-   * action - Operation type: HDFS_READ, HDFS_WRITE, or MAPRED_SHUFFLE.
-   * src    - Source IP address
-   * dest   - Destination IP address
-   * size   - Size of the data payload.
+   * @return a list of client trace objects
    * 
    */
   @GET

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java b/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java
index b325948..5ae4897 100644
--- a/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java
+++ b/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java
@@ -85,8 +85,8 @@ public class BackfillingLoader {
   }
   
   /**
-   * @param args
-   * @throws Exception 
+   * @param args are the command line parameters
+   * @throws Exception if there is a problem loading data to HDFS
    */
   public static void main(String[] args) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java b/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
index 743a31c..13f8bfe 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
@@ -48,9 +48,9 @@ public class DumpArchive {
   
   static HashMap<String, Integer> counts  = new LinkedHashMap<String, Integer>();
   /**
-   * @param args
-   * @throws URISyntaxException
-   * @throws IOException
+   * @param args are the command line parameters
+   * @throws URISyntaxException if there is a problem parsing the HDFS URL
+   * @throws IOException if there is a problem accessing HDFS
    */
   public static void main(String[] args) throws IOException, URISyntaxException {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java b/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
index 11d1d1a..f620074 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
@@ -37,10 +37,13 @@ public class DumpChunks {
   
   /**
    * Tries to find chunks matching a given pattern.
-   * Takes as input a set of &-delimited patterns, followed
+   * Takes as input a set of &amp;-delimited patterns, followed
    * by a list of file names.
    * 
-   * E.g:  Dump datatype=Iostat&source=/my/log/.* *.done
+   * E.g.: Dump datatype=Iostat&amp;source=/my/log/.* *.done
+   * @param args are the command line parameters
+   * @throws IOException if there is a problem accessing HDFS
+   * @throws URISyntaxException if there is an error parsing the HDFS URL
    */
   public static void main(String[] args) throws IOException, URISyntaxException {
     

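To make the &amp;-delimited filter syntax above concrete, here is a hedged sketch that invokes
DumpChunks programmatically; the HDFS sink path is a hypothetical example and would normally be
supplied on the command line.

    import org.apache.hadoop.chukwa.util.DumpChunks;

    public class DumpChunksSketch {
      public static void main(String[] args) throws Exception {
        // Equivalent to: Dump datatype=Iostat&source=/my/log/.* *.done
        // The HDFS sink path below is illustrative only.
        DumpChunks.main(new String[] {
            "datatype=Iostat&source=/my/log/.*",
            "hdfs://localhost:9000/chukwa/logs/*.done" });
      }
    }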
http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/util/DumpRecord.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DumpRecord.java b/src/main/java/org/apache/hadoop/chukwa/util/DumpRecord.java
index 66cde34..fa76d05 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/DumpRecord.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/DumpRecord.java
@@ -31,9 +31,9 @@ import org.apache.hadoop.io.SequenceFile;
 public class DumpRecord {
 
   /**
-   * @param args
-   * @throws URISyntaxException
-   * @throws IOException
+   * @param args are the command line parameters
+   * @throws URISyntaxException if there is a problem parsing the URL
+   * @throws IOException if there is a problem reading files on HDFS
    */
   public static void main(String[] args) throws IOException, URISyntaxException {
     System.out.println("Input file:" + args[0]);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java b/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
index 5a6a68b..0bfdfe6 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
@@ -50,7 +50,8 @@ public class HierarchyDataType {
    * @param fs The file system
    * @param path The parent folder
    * @param filter The pattern matcher to filter the required files
-   * @return
+   * @param recursive is a flag to search sub-directories recursively
+   * @return a list of matched FileStatus objects
    */
   public static List<FileStatus> globStatus(FileSystem fs, Path path,
       PathFilter filter, boolean recursive) {
@@ -87,7 +88,7 @@ public class HierarchyDataType {
    * List all files under certain path and its sub-directories
    * @param fs The file system
    * @param path  The parent folder
-   * @param recursive
+   * @param recursive is a flag to search recursively
    * @return The list of all sub-dirs
    */
   public static List<FileStatus> globStatus(FileSystem fs, Path path,
@@ -136,8 +137,8 @@ public class HierarchyDataType {
   /**
    * Get the directory without first and last slash mark.
    * 
-   * @param datasource
-   * @return
+   * @param datasource is the data source directory string
+   * @return the same string with the leading and trailing slash removed
    */
   public static String trimSlash(String datasource) {
     String results = datasource;
@@ -154,8 +155,8 @@ public class HierarchyDataType {
   * Transform the hierarchyDataType directory into its filename (without any
    * slash mark)
    * 
-   * @param datasource
-   * @return
+   * @param datasource is the data source directory name
+   * @return the corresponding data source file name
    */
   public static String getHierarchyDataTypeFileName(String datasource){
     return datasource.replace("/", CHUKWA_CONSTANT.HIERARCHY_CONNECTOR);
@@ -165,8 +166,8 @@ public class HierarchyDataType {
    * Transform the hierarchyDataType filename into its directory name (with
    * slash mark)
    * 
-   * @param datasource
-   * @return
+   * @param datasource is the data source file name
+   * @return the corresponding data source directory
    */
   public static String getHierarchyDataTypeDirectory(String datasource) {
     return datasource.replace(CHUKWA_CONSTANT.HIERARCHY_CONNECTOR, "/");

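A brief sketch of the recursive globStatus variant documented above; the sink directory and the
.done filter are illustrative assumptions.

    import java.util.List;
    import org.apache.hadoop.chukwa.util.HierarchyDataType;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    public class GlobStatusSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Match finished .done files under /chukwa/logs and all sub-directories.
        PathFilter doneFilter = new PathFilter() {
          public boolean accept(Path p) {
            return p.getName().endsWith(".done");
          }
        };
        List<FileStatus> done =
            HierarchyDataType.globStatus(fs, new Path("/chukwa/logs"), doneFilter, true);
        System.out.println("matched files: " + done.size());
      }
    }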
http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/util/RegexUtil.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/RegexUtil.java b/src/main/java/org/apache/hadoop/chukwa/util/RegexUtil.java
index b20d7f6..2126103 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/RegexUtil.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/RegexUtil.java
@@ -75,6 +75,7 @@ public class RegexUtil {
      * <p>
      * Consider calling this constructor with the result of
      * {@link RegexUtil#regexError}.
+     * @param pse is the PatternSyntaxException to wrap
      */
     public CheckedPatternSyntaxException(PatternSyntaxException pse) {
       this.pse = pse;
@@ -139,6 +140,8 @@ public class RegexUtil {
   /**
    * Returns true if the argument is a syntactically valid regular
    * expression.
+   * @param s is the string to check
+   * @return true if s is a syntactically valid regular expression
    */
   public static boolean isRegex(String s) {
     return isRegex(s, 0);
@@ -147,6 +150,9 @@ public class RegexUtil {
   /**
    * Returns true if the argument is a syntactically valid regular
    * expression with at least the given number of groups.
+   * @param s is the string to check
+   * @param groups is the minimum number of capturing groups
+   * @return true if s is a valid regular expression with at least the given number of groups
    */
   /*>>>
   @SuppressWarnings("regex")    // RegexUtil
@@ -165,11 +171,9 @@ public class RegexUtil {
   /**
    * Returns true if the argument is a syntactically valid regular
    * expression.
+   * @param c is the character to check
+   * @return true if the character is a syntactically valid regular expression
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
   public static boolean isRegex(char c) {
     return isRegex(Character.toString(c));
   }
@@ -178,12 +182,10 @@ public class RegexUtil {
    * Returns null if the argument is a syntactically valid regular
    * expression. Otherwise returns a string describing why the argument is
    * not a regex.
+   * @param s is the string to check
+   * @return null if s is a valid regular expression, otherwise a string describing why it is not
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
-  public static /*@Nullable*/ String regexError(String s) {
+  public static String regexError(String s) {
     return regexError(s, 0);
   }
 
@@ -191,12 +193,11 @@ public class RegexUtil {
    * Returns null if the argument is a syntactically valid regular
    * expression with at least the given number of groups. Otherwise returns
    * a string describing why the argument is not a regex.
+   * @param s is the string to check
+   * @param groups is the minimum number of capturing groups
+   * @return null if s is a valid regular expression with enough groups, otherwise a string describing why it is not
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
-  public static /*@Nullable*/ String regexError(String s, int groups) {
+  public static String regexError(String s, int groups) {
     try {
       Pattern p = Pattern.compile(s);
       int actualGroups = getGroupCount(p);
@@ -213,12 +214,10 @@ public class RegexUtil {
    * Returns null if the argument is a syntactically valid regular
    * expression. Otherwise returns a PatternSyntaxException describing
    * why the argument is not a regex.
+   * @param s is the string to check
+   * @return null if s is a valid regular expression, otherwise a PatternSyntaxException describing why it is not
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
-  public static /*@Nullable*/ PatternSyntaxException regexException(String s) {
+  public static PatternSyntaxException regexException(String s) {
     return regexException(s, 0);
   }
 
@@ -226,12 +225,11 @@ public class RegexUtil {
    * Returns null if the argument is a syntactically valid regular
    * expression with at least the given number of groups. Otherwise returns a
    * PatternSyntaxException describing why the argument is not a regex.
+   * @param s is the string to check
+   * @param groups is the minimum number of capturing groups
+   * @return null if s is a valid regular expression with enough groups, otherwise a PatternSyntaxException describing why it is not
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
-  public static /*@Nullable*/ PatternSyntaxException regexException(String s, int groups) {
+  public static PatternSyntaxException regexException(String s, int groups) {
     try {
       Pattern p = Pattern.compile(s);
       int actualGroups = getGroupCount(p);
@@ -249,8 +247,10 @@ public class RegexUtil {
    * otherwise throws an error. The purpose of this method is to suppress Regex
   * Checker warnings. Once the Regex Checker supports flow-sensitivity, it
    * should be very rarely needed.
+   * @param s is the string to check
+   * @return s if it is a syntactically valid regular expression
    */
-  public static /*@Regex*/ String asRegex(String s) {
+  public static String asRegex(String s) {
     return asRegex(s, 0);
   }
 
@@ -259,12 +259,11 @@ public class RegexUtil {
    * with at least the given number of groups, otherwise throws an error. The
   * purpose of this method is to suppress Regex Checker warnings. Once the
    * Regex Checker supports flow-sensitivity, it should be very rarely needed.
+   * @param s is the string to check
+   * @param groups is the minimum number of capturing groups
+   * @return s if it is a valid regular expression with at least the given number of groups
    */
-  /*>>>
-  @SuppressWarnings("regex")    // RegexUtil
-  */
-  /*@Pure*/
-  public static /*@Regex*/ String asRegex(String s, int groups) {
+  public static String asRegex(String s, int groups) {
     try {
       Pattern p = Pattern.compile(s);
       int actualGroups = getGroupCount(p);


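A short usage sketch for the validation helpers documented above; the filter string is a
hypothetical user-supplied value.

    import java.util.regex.Pattern;
    import org.apache.hadoop.chukwa.util.RegexUtil;

    public class RegexCheckSketch {
      public static void main(String[] args) {
        String filter = "datatype=(.*)";  // hypothetical user-supplied pattern
        // regexError returns null when the string is a valid regular expression
        // with at least the requested number of capturing groups.
        String error = RegexUtil.regexError(filter, 1);
        if (error != null) {
          System.err.println("invalid filter: " + error);
          return;
        }
        Pattern p = Pattern.compile(filter);
        System.out.println(p.matcher("datatype=Iostat").matches());
      }
    }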