hama-commits mailing list archives

From: edwardy...@apache.org
Subject: svn commit: r744798 [1/2] - in /incubator/hama/trunk: ./ src/examples/org/apache/hama/examples/ src/java/org/apache/hama/ src/java/org/apache/hama/algebra/ src/java/org/apache/hama/io/ src/java/org/apache/hama/mapred/ src/test/org/apache/hama/ src/test...
Date: Mon, 16 Feb 2009 04:35:24 GMT
Author: edwardyoon
Date: Mon Feb 16 04:35:23 2009
New Revision: 744798

URL: http://svn.apache.org/viewvc?rev=744798&view=rev
Log:
Replace MapWritable with hadoop.io.MapWritable

Removed:
    incubator/hama/trunk/src/java/org/apache/hama/io/HMapWritable.java
    incubator/hama/trunk/src/java/org/apache/hama/io/VectorMapWritable.java
    incubator/hama/trunk/src/java/org/apache/hama/io/VectorWritable.java
Modified:
    incubator/hama/trunk/CHANGES.txt
    incubator/hama/trunk/src/examples/org/apache/hama/examples/ExampleDriver.java
    incubator/hama/trunk/src/examples/org/apache/hama/examples/FileMatrixBlockMult.java
    incubator/hama/trunk/src/examples/org/apache/hama/examples/RandomMatrix.java
    incubator/hama/trunk/src/java/org/apache/hama/AbstractVector.java
    incubator/hama/trunk/src/java/org/apache/hama/DenseMatrix.java
    incubator/hama/trunk/src/java/org/apache/hama/DenseVector.java
    incubator/hama/trunk/src/java/org/apache/hama/Vector.java
    incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionMap.java
    incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionReduce.java
    incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyMap.java
    incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyReduce.java
    incubator/hama/trunk/src/java/org/apache/hama/io/VectorUpdate.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/CollectBlocksMap.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/CollectBlocksMapReduceBase.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/CollectBlocksMapper.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/CollectBlocksReducer.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/RandomMatrixMap.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/RandomMatrixReduce.java
    incubator/hama/trunk/src/java/org/apache/hama/mapred/VectorInputFormat.java
    incubator/hama/trunk/src/test/org/apache/hama/TestDenseMatrix.java
    incubator/hama/trunk/src/test/org/apache/hama/TestDenseVector.java
    incubator/hama/trunk/src/test/org/apache/hama/examples/TestFileMatrixBlockMult.java
    incubator/hama/trunk/src/test/org/apache/hama/mapred/TestMatrixMapReduce.java
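
The gist of the change is easiest to see in the AbstractVector diff below: the typed
HMapWritable<Integer, DoubleEntry> entries map gives way to the untyped
org.apache.hadoop.io.MapWritable, so indices are boxed as IntWritable on the way in and
values are cast back to DoubleEntry on the way out. A minimal sketch of the before/after
access pattern (DoubleEntry is the org.apache.hama.io wrapper around a double; the
snippet itself is illustrative, not part of the commit):

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hama.io.DoubleEntry;

    // Before: typed map, no boxing or casting needed.
    //   HMapWritable<Integer, DoubleEntry> entries = new HMapWritable<Integer, DoubleEntry>();
    //   entries.put(0, new DoubleEntry(3.14));
    //   double v = entries.get(0).getValue();

    // After: MapWritable is effectively a Map<Writable, Writable>, so keys are
    // boxed as IntWritable and values are cast back to DoubleEntry.
    MapWritable entries = new MapWritable();
    entries.put(new IntWritable(0), new DoubleEntry(3.14));
    double v = ((DoubleEntry) entries.get(new IntWritable(0))).getValue();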

Modified: incubator/hama/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/CHANGES.txt?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/CHANGES.txt (original)
+++ incubator/hama/trunk/CHANGES.txt Mon Feb 16 04:35:23 2009
@@ -35,6 +35,7 @@
     
   IMPROVEMENTS
 
+    HAMA-152: Replace MapWritable to hadoop.io.MapWritable (edwardyoon)
     HAMA-109: Implement add(double alpha, Matrix B) (edwardyoon)
     HAMA-150: Refactor blockingMapRed (edwardyoon)
     HAMA-148: Implement of set(double alpha, Matrix B) (edwardyoon)

Modified: incubator/hama/trunk/src/examples/org/apache/hama/examples/ExampleDriver.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/examples/org/apache/hama/examples/ExampleDriver.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/examples/org/apache/hama/examples/ExampleDriver.java (original)
+++ incubator/hama/trunk/src/examples/org/apache/hama/examples/ExampleDriver.java Mon Feb 16 04:35:23 2009
@@ -26,7 +26,7 @@
   public static void main(String[] args) {
     ProgramDriver pgd = new ProgramDriver();
     try {
-      pgd.addClass("random", RandomMatrix.class, "Generate matrix with random elements.");
+      pgd.addClass("rand", RandomMatrix.class, "Generate matrix with random elements.");
       pgd.addClass("add", MatrixAddition.class, "Mat-Mat addition.");
       pgd.addClass("mult", MatrixMultiplication.class, "Mat-Mat multiplication.");
       pgd.addClass("multfiles", MatrixMultiplication.class, "file matrices multiplication.");

Modified: incubator/hama/trunk/src/examples/org/apache/hama/examples/FileMatrixBlockMult.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/examples/org/apache/hama/examples/FileMatrixBlockMult.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/examples/org/apache/hama/examples/FileMatrixBlockMult.java (original)
+++ incubator/hama/trunk/src/examples/org/apache/hama/examples/FileMatrixBlockMult.java Mon Feb 16 04:35:23 2009
@@ -41,7 +41,6 @@
 import org.apache.hama.algebra.BlockMultiplyReduce;
 import org.apache.hama.io.BlockID;
 import org.apache.hama.io.BlockWritable;
-import org.apache.hama.io.VectorWritable;
 import org.apache.hama.mapred.CollectBlocksMap;
 import org.apache.hama.mapred.CollectBlocksMapReduceBase;
 import org.apache.hama.util.JobManager;
@@ -61,33 +60,31 @@
 
     @Override
     public void map(IntWritable key, MapWritable value,
-        OutputCollector<BlockID, VectorWritable> output, Reporter reporter)
+        OutputCollector<BlockID, MapWritable> output, Reporter reporter)
         throws IOException {
-      int startColumn;
-      int endColumn;
-      int blkRow = key.get() / mBlockRowSize;
+      int startColumn, endColumn, blkRow = key.get() / mBlockRowSize, i = 0;
       this.value = value;
-
-      int i = 0;
+      
       do {
         startColumn = i * mBlockColSize;
         endColumn = startColumn + mBlockColSize - 1;
         if (endColumn >= mColumns) // the last sub vector
           endColumn = mColumns - 1;
-        output.collect(new BlockID(blkRow, i), new VectorWritable(key.get(),
-            subVector(startColumn, endColumn)));
+        output.collect(new BlockID(blkRow, i), subVector(key.get(), startColumn, endColumn));
 
         i++;
       } while (endColumn < (mColumns - 1));
     }
 
-    private DenseVector subVector(int i0, int i1) {
+    private MapWritable subVector(int row, int i0, int i1) {
       DenseVector res = new DenseVector();
+      res.setRow(row);
+      
       for (int i = i0; i <= i1; i++) {
         res.set(i, ((DoubleWritable) this.value.get(new IntWritable(i))).get());
       }
 
-      return res;
+      return res.getEntries();
     }
   }
 

Modified: incubator/hama/trunk/src/examples/org/apache/hama/examples/RandomMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/examples/org/apache/hama/examples/RandomMatrix.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/examples/org/apache/hama/examples/RandomMatrix.java (original)
+++ incubator/hama/trunk/src/examples/org/apache/hama/examples/RandomMatrix.java Mon Feb 16 04:35:23 2009
@@ -28,7 +28,7 @@
   public static void main(String[] args) throws IOException {
     if (args.length < 3) {
       System.out
-          .println("random  [-m maps] [-r reduces] <rows> <columns> <matrix_name>");
+          .println("rand  [-m maps] [-r reduces] <rows> <columns> <matrix_name>");
       System.exit(-1);
     } else {
       parseArgs(args);

Modified: incubator/hama/trunk/src/java/org/apache/hama/AbstractVector.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/AbstractVector.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/AbstractVector.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/AbstractVector.java Mon Feb 16 04:35:23 2009
@@ -21,14 +21,17 @@
 
 import java.util.Iterator;
 
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 import org.apache.hama.io.DoubleEntry;
-import org.apache.hama.io.HMapWritable;
 
 /**
  * Methods of the vector classes
  */
 public abstract class AbstractVector {
-  public HMapWritable<Integer, DoubleEntry> entries;
+  protected MapWritable entries;
   
   /**
    * Gets the value of index
@@ -39,7 +42,7 @@
   public double get(int index) {
     double value;
     try {
-      value = this.entries.get(index).getValue();
+      value = ((DoubleEntry) this.entries.get(new IntWritable(index))).getValue();
     } catch (NullPointerException e) {
       throw new NullPointerException("v("+index+") : " + e.toString());
     }
@@ -56,10 +59,10 @@
   public void set(int index, double value) {
     // If entries are null, create new object 
     if(this.entries == null) {
-      this.entries = new HMapWritable<Integer, DoubleEntry>();
+      this.entries = new MapWritable();
     }
     
-    this.entries.put(index, new DoubleEntry(value));
+    this.entries.put(new IntWritable(index), new DoubleEntry(value));
   }
   
   /**
@@ -77,7 +80,7 @@
    * 
    * @return iterator
    */
-  public Iterator<DoubleEntry> iterator() {
+  public Iterator<Writable> iterator() {
     return this.entries.values().iterator();
   }
   
@@ -87,15 +90,19 @@
    * @return a size of vector
    */
   public int size() {
-    return (this.entries != null) ? this.entries.size() : 0;
+    int x = 0;
+    if(this.entries != null && this.entries.containsKey(new Text("row"))) 
+      x = 1;
+    
+    return (this.entries != null) ? this.entries.size() - x : 0;
   }
   
   /**
-   * Returns the {@link org.apache.hama.io.HMapWritable}
+   * Returns the {@link org.apache.hadoop.io.MapWritable}
    * 
    * @return the entries of vector
    */
-  public HMapWritable<Integer, DoubleEntry> getEntries() {
+  public MapWritable getEntries() {
     return this.entries;
   }
 }
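
One subtlety in the new size(): when a vector's row index is stored in the same
MapWritable under a Text("row") key (as DenseVector.setRow does elsewhere in this
commit), that metadata entry would inflate the raw map size, so size() subtracts it
when present. A small illustration (assuming the DoubleEntry wrapper above; not part
of the commit):

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hama.io.DoubleEntry;

    MapWritable entries = new MapWritable();
    entries.put(new IntWritable(0), new DoubleEntry(1.0));
    entries.put(new IntWritable(1), new DoubleEntry(2.0));
    entries.put(new Text("row"), new IntWritable(7)); // metadata, not an element
    // entries.size() == 3, but AbstractVector.size() reports 2.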

Modified: incubator/hama/trunk/src/java/org/apache/hama/DenseMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/DenseMatrix.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/DenseMatrix.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/DenseMatrix.java Mon Feb 16 04:35:23 2009
@@ -1,608 +1,607 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
-import org.apache.hama.algebra.BlockMultiplyMap;
-import org.apache.hama.algebra.BlockMultiplyReduce;
-import org.apache.hama.algebra.RowCyclicAdditionMap;
-import org.apache.hama.algebra.RowCyclicAdditionReduce;
-import org.apache.hama.algebra.SIMDMultiplyMap;
-import org.apache.hama.algebra.SIMDMultiplyReduce;
-import org.apache.hama.io.BlockID;
-import org.apache.hama.io.BlockWritable;
-import org.apache.hama.io.DoubleEntry;
-import org.apache.hama.io.HMapWritable;
-import org.apache.hama.io.VectorUpdate;
-import org.apache.hama.io.VectorWritable;
-import org.apache.hama.mapred.CollectBlocksMapper;
-import org.apache.hama.mapred.RandomMatrixMap;
-import org.apache.hama.mapred.RandomMatrixReduce;
-import org.apache.hama.mapred.VectorInputFormat;
-import org.apache.hama.util.BytesUtil;
-import org.apache.hama.util.JobManager;
-import org.apache.hama.util.RandomVariable;
-
-/**
- * This class represents a dense matrix.
- */
-public class DenseMatrix extends AbstractMatrix implements Matrix {
-  static int tryPathLength = Constants.DEFAULT_PATH_LENGTH;
-  static final String TABLE_PREFIX = DenseMatrix.class.getSimpleName() + "_";
-  static private final Path TMP_DIR = new Path(DenseMatrix.class
-      .getSimpleName()
-      + "_TMP_dir");
-
-  /**
-   * Construct a raw matrix. This just creates a table in HBase, without laying
-   * any schema (such as dimensions: i, j) on it.
-   * 
-   * @param conf configuration object
-   * @throws IOException throw the exception to let the user know what happened,
-   *                 if we didn't create the matrix successfully.
-   */
-  public DenseMatrix(HamaConfiguration conf) throws IOException {
-    setConfiguration(conf);
-
-    tryToCreateTable();
-
-    closed = false;
-  }
-
-  /**
-   * Create/load a matrix aliased as 'matrixName'.
-   * 
-   * @param conf configuration object
-   * @param matrixName the name of the matrix
-   * @param force if force is true, a new matrix will be created no matter
-   *                whether 'matrixName' is aliased to an existing matrix; otherwise,
-   *                just try to load an existing matrix aliased as 'matrixName'.
-   * @throws IOException
-   */
-  public DenseMatrix(HamaConfiguration conf, String matrixName, boolean force)
-      throws IOException {
-    setConfiguration(conf);
-    // if force is set to true:
-    // 1) if this matrixName is aliased to another matrix, we will remove
-    // the old alias, create a new matrix table, and alias to it.
-    
-    // 2) if this matrixName has no alias to another matrix, we will create
-    // a new matrix table, and alias to it.
-    //
-    // if force is set to false, we just try to load an existing matrix aliased
-    // as 'matrixName'.
-
-    boolean existed = hamaAdmin.matrixExists(matrixName);
-
-    if (force) {
-      if (existed) {
-        // remove the old alias
-        hamaAdmin.delete(matrixName);
-      }
-      // create a new matrix table.
-      tryToCreateTable();
-      // save the new alias relationship
-      save(matrixName);
-    } else {
-      if (existed) {
-        // try to get the actual path of the table
-        matrixPath = hamaAdmin.getPath(matrixName);
-        // load the matrix
-        table = new HTable(conf, matrixPath);
-        // increment the reference
-        incrementAndGetRef();
-      } else {
-        throw new IOException("Try to load non-existed matrix alised as "
-            + matrixName);
-      }
-    }
-
-    closed = false;
-  }
-
-  /**
-   * Load a matrix from an existing matrix table whose table name is 'matrixpath'.
-   * It is for internal use by map/reduce.
-   * 
-   * @param conf configuration object
-   * @param matrixpath
-   * @throws IOException
-   * @throws IOException
-   */
-  public DenseMatrix(HamaConfiguration conf, String matrixpath)
-      throws IOException {
-    setConfiguration(conf);
-    matrixPath = matrixpath;
-    // load the matrix
-    table = new HTable(conf, matrixPath);
-    // TODO: for now we don't increment the reference of the table,
-    // since it's for internal use by map/reduce.
-    // if we want to increment the reference of the table,
-    // we don't know where to call Matrix.close in the Add & Mul map/reduce
-    // process to decrement the reference. It seems difficult.
-  }
-
-  /**
-   * Create an m-by-n constant matrix.
-   * 
-   * @param conf configuration object
-   * @param m the number of rows.
-   * @param n the number of columns.
-   * @param s fill the matrix with this scalar value.
-   * @throws IOException throw the exception to let the user know what happened,
-   *                 if we didn't create the matrix successfully.
-   */
-  public DenseMatrix(HamaConfiguration conf, int m, int n, double s)
-      throws IOException {
-    setConfiguration(conf);
-
-    tryToCreateTable();
-
-    closed = false;
-
-    for (int i = 0; i < m; i++) {
-      for (int j = 0; j < n; j++) {
-        set(i, j, s);
-      }
-    }
-
-    setDimension(m, n);
-  }
-
-  /**
-   * Try to create a new matrix with a new random name. The number of tries will
-   * be (Integer.MAX_VALUE - 4) * DEFAULT_TRY_TIMES.
-   * 
-   * @throws IOException
-   */
-  private void tryToCreateTable() throws IOException {
-    int tryTimes = Constants.DEFAULT_TRY_TIMES;
-    do {
-      matrixPath = TABLE_PREFIX + RandomVariable.randMatrixPath(tryPathLength);
-
-      if (!admin.tableExists(matrixPath)) { // no table 'matrixPath' in hbase.
-        tableDesc = new HTableDescriptor(matrixPath);
-        create();
-        return;
-      }
-
-      tryTimes--;
-      if (tryTimes <= 0) { // this loop has exhausted DEFAULT_TRY_TIMES.
-        tryPathLength++;
-        tryTimes = Constants.DEFAULT_TRY_TIMES;
-      }
-
-    } while (tryPathLength <= Constants.DEFAULT_MAXPATHLEN);
-    // exhausted the try times;
-    // throw an IOException to let the user know what happened.
-    throw new IOException("Try too many times to create a table in hbase.");
-  }
-
-  /**
-   * Generate matrix with random elements
-   * 
-   * @param conf configuration object
-   * @param m the number of rows.
-   * @param n the number of columns.
-   * @return an m-by-n matrix with uniformly distributed random elements.
-   * @throws IOException
-   */
-  public static DenseMatrix random(HamaConfiguration conf, int m, int n)
-      throws IOException {
-    DenseMatrix rand = new DenseMatrix(conf);
-    DenseVector vector = new DenseVector();
-    LOG.info("Create the " + m + " * " + n + " random matrix : "
-        + rand.getPath());
-
-    for (int i = 0; i < m; i++) {
-      vector.clear();
-      for (int j = 0; j < n; j++) {
-        vector.set(j, RandomVariable.rand());
-      }
-      rand.setRow(i, vector);
-    }
-
-    rand.setDimension(m, n);
-    return rand;
-  }
-
-  /**
-   * Generate matrix with random elements using Map/Reduce
-   * 
-   * @param conf configuration object
-   * @param m the number of rows.
-   * @param n the number of columns.
-   * @return an m-by-n matrix with uniformly distributed random elements.
-   * @throws IOException
-   */
-  public static DenseMatrix random_mapred(HamaConfiguration conf, int m, int n)
-      throws IOException {
-    DenseMatrix rand = new DenseMatrix(conf);
-    LOG.info("Create the " + m + " * " + n + " random matrix : "
-        + rand.getPath());
-    rand.setDimension(m, n);
-
-    JobConf jobConf = new JobConf(conf);
-    jobConf.setJobName("random matrix MR job : " + rand.getPath());
-
-    jobConf.setNumMapTasks(conf.getNumMapTasks());
-    jobConf.setNumReduceTasks(conf.getNumReduceTasks());
-
-    final Path inDir = new Path(TMP_DIR, "in");
-    FileInputFormat.setInputPaths(jobConf, inDir);
-    jobConf.setMapperClass(RandomMatrixMap.class);
-    jobConf.setMapOutputKeyClass(IntWritable.class);
-    jobConf.setMapOutputValueClass(VectorWritable.class);
-
-    RandomMatrixReduce.initJob(rand.getPath(), RandomMatrixReduce.class,
-        jobConf);
-    jobConf.setSpeculativeExecution(false);
-    jobConf.set("matrix.column", String.valueOf(n));
-
-    jobConf.setInputFormat(SequenceFileInputFormat.class);
-    final FileSystem fs = FileSystem.get(jobConf);
-    int interval = m / conf.getNumMapTasks();
-
-    // generate an input file for each map task
-    for (int i = 0; i < conf.getNumMapTasks(); ++i) {
-      final Path file = new Path(inDir, "part" + i);
-      final IntWritable start = new IntWritable(i * interval);
-      IntWritable end = null;
-      if ((i + 1) != conf.getNumMapTasks()) {
-        end = new IntWritable(((i * interval) + interval) - 1);
-      } else {
-        end = new IntWritable(m - 1);
-      }
-      final SequenceFile.Writer writer = SequenceFile.createWriter(fs, jobConf,
-          file, IntWritable.class, IntWritable.class, CompressionType.NONE);
-      try {
-        writer.append(start, end);
-      } finally {
-        writer.close();
-      }
-      System.out.println("Wrote input for Map #" + i);
-    }
-
-    JobClient.runJob(jobConf);
-    fs.delete(TMP_DIR, true);
-    return rand;
-  }
-
-  /**
-   * Generate identity matrix
-   * 
-   * @param conf configuration object
-   * @param m the number of rows.
-   * @param n the number of columns.
-   * @return an m-by-n matrix with ones on the diagonal and zeros elsewhere.
-   * @throws IOException
-   */
-  public static Matrix identity(HamaConfiguration conf, int m, int n)
-      throws IOException {
-    Matrix identity = new DenseMatrix(conf);
-    LOG.info("Create the " + m + " * " + n + " identity matrix : "
-        + identity.getPath());
-
-    for (int i = 0; i < m; i++) {
-      DenseVector vector = new DenseVector();
-      for (int j = 0; j < n; j++) {
-        vector.set(j, (i == j ? 1.0 : 0.0));
-      }
-      identity.setRow(i, vector);
-    }
-
-    identity.setDimension(m, n);
-    return identity;
-  }
-
-  /**
-   * Gets the double value of (i, j)
-   * 
-   * @param i ith row of the matrix
-   * @param j jth column of the matrix
-   * @return the value of entry, or zero If entry is null
-   * @throws IOException
-   */
-  public double get(int i, int j) throws IOException {
-    Cell c = table.get(BytesUtil.getRowIndex(i), BytesUtil.getColumnIndex(j));
-    return (c != null) ? BytesUtil.bytesToDouble(c.getValue()) : 0;
-  }
-
-  /**
-   * Gets the vector of row
-   * 
-   * @param i the row index of the matrix
-   * @return the vector of row
-   * @throws IOException
-   */
-  public DenseVector getRow(int i) throws IOException {
-    return new DenseVector(table.getRow(BytesUtil.getRowIndex(i)));
-  }
-  
-  /**
-   * Gets the vector of column
-   * 
-   * @param j the column index of the matrix
-   * @return the vector of column
-   * @throws IOException
-   */
-  public DenseVector getColumn(int j) throws IOException {
-    byte[] columnKey = BytesUtil.getColumnIndex(j);
-    byte[][] c = { columnKey };
-    Scanner scan = table.getScanner(c, HConstants.EMPTY_START_ROW);
-
-    HMapWritable<Integer, DoubleEntry> trunk = new HMapWritable<Integer, DoubleEntry>();
-
-    for (RowResult row : scan) {
-      trunk.put(BytesUtil.bytesToInt(row.getRow()), new DoubleEntry(row
-          .get(columnKey)));
-    }
-
-    return new DenseVector(trunk);
-  }
-
-  /**
-   * Set the row of a matrix to a given vector
-   * 
-   * @param row
-   * @param vector
-   * @throws IOException
-   */
-  public void setRow(int row, Vector vector) throws IOException {
-    VectorUpdate update = new VectorUpdate(row);
-    update.putAll(((DenseVector) vector).getEntries().entrySet());
-    table.commit(update.getBatchUpdate());
-  }
-
-  /**
-   * Set the column of a matrix to a given vector
-   * 
-   * @param column
-   * @param vector
-   * @throws IOException
-   */
-  public void setColumn(int column, Vector vector) throws IOException {
-    for (int i = 0; i < vector.size(); i++) {
-      VectorUpdate update = new VectorUpdate(i);
-      update.put(column, vector.get(i));
-      table.commit(update.getBatchUpdate());
-    }
-  }
-  
-  /**
-   * A = B + A
-   * 
-   * @param B
-   * @return A
-   * @throws IOException
-   */
-  public Matrix add(Matrix B) throws IOException {
-    Matrix result = new DenseMatrix(config);
-    JobConf jobConf = new JobConf(config);
-    jobConf.setJobName("addition MR job" + result.getPath());
-
-    jobConf.setNumMapTasks(config.getNumMapTasks());
-    jobConf.setNumReduceTasks(config.getNumReduceTasks());
-
-    RowCyclicAdditionMap.initJob(this.getPath(), B.getPath(),
-        RowCyclicAdditionMap.class, IntWritable.class, VectorWritable.class,
-        jobConf);
-    RowCyclicAdditionReduce.initJob(result.getPath(),
-        RowCyclicAdditionReduce.class, jobConf);
-
-    JobManager.execute(jobConf, result);
-    return result;
-  }
-
-  /**
-   * A = alpha*B + A
-   * 
-   * @param alpha
-   * @param B
-   * @return A
-   * @throws IOException
-   */
-  public Matrix add(double alpha, Matrix B) throws IOException {
-    Matrix temp = new DenseMatrix(config);
-    temp.set(alpha, B);
-    
-    Matrix result = this.add(temp);
-    return result;
-  }
-  
-  /**
-   * C = A*B using SIMD algorithm
-   * 
-   * @param B
-   * @return C
-   * @throws IOException
-   */
-  public Matrix mult(Matrix B) throws IOException {
-    Matrix result = new DenseMatrix(config);
-
-    JobConf jobConf = new JobConf(config);
-    jobConf.setJobName("multiplication MR job : " + result.getPath());
-
-    jobConf.setNumMapTasks(config.getNumMapTasks());
-    jobConf.setNumReduceTasks(config.getNumReduceTasks());
-
-    SIMDMultiplyMap.initJob(this.getPath(), B.getPath(), SIMDMultiplyMap.class,
-        IntWritable.class, VectorWritable.class, jobConf);
-    SIMDMultiplyReduce.initJob(result.getPath(), SIMDMultiplyReduce.class,
-        jobConf);
-    JobManager.execute(jobConf, result);
-    return result;
-  }
-
-  /**
-   * C = A * B using Blocking algorithm
-   * 
-   * @param B
-   * @param blocks the number of blocks
-   * @return C
-   * @throws IOException
-   */
-  public Matrix mult(Matrix B, int blocks) throws IOException {
-    Matrix collectionTable = new DenseMatrix(config);
-    LOG.info("Collect Blocks");
-
-    collectBlocksMapRed(this.getPath(), collectionTable, blocks, true);
-    collectBlocksMapRed(B.getPath(), collectionTable, blocks, false);
-    
-    Matrix result = new DenseMatrix(config);
-
-    JobConf jobConf = new JobConf(config);
-    jobConf.setJobName("multiplication MR job : " + result.getPath());
-
-    jobConf.setNumMapTasks(config.getNumMapTasks());
-    jobConf.setNumReduceTasks(config.getNumReduceTasks());
-
-    BlockMultiplyMap.initJob(collectionTable.getPath(), BlockMultiplyMap.class,
-        BlockID.class, BlockWritable.class, jobConf);
-    BlockMultiplyReduce.initJob(result.getPath(), BlockMultiplyReduce.class,
-        jobConf);
-
-    JobManager.execute(jobConf, result);
-    // Should be collectionTable removed?
-    return result;
-  }
-
-  /**
-   * C = alpha*A*B + C
-   * 
-   * @param alpha
-   * @param B
-   * @param C
-   * @return C
-   * @throws IOException
-   */
-  public Matrix multAdd(double alpha, Matrix B, Matrix C) throws IOException {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  /**
-   * Computes the given norm of the matrix
-   * 
-   * @param type
-   * @return norm of the matrix
-   * @throws IOException
-   */
-  public double norm(Norm type) throws IOException {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  /**
-   * Returns type of matrix
-   */
-  public String getType() {
-    return this.getClass().getSimpleName();
-  }
-
-  /**
-   * Returns the sub matrix formed by selecting certain rows and
-   * columns from a bigger matrix. Obtaining the sub matrix is an in-memory operation only.
-   * 
-   * @param i0 the start index of row
-   * @param i1 the end index of row
-   * @param j0 the start index of column
-   * @param j1 the end index of column
-   * @return the sub matrix of matrix
-   * @throws IOException
-   */
-  public SubMatrix subMatrix(int i0, int i1, int j0, int j1) throws IOException {
-    int columnSize = (j1 - j0) + 1;
-    SubMatrix result = new SubMatrix((i1 - i0) + 1, columnSize);
-
-    byte[][] cols = new byte[columnSize][];
-    for (int j = j0, jj = 0; j <= j1; j++, jj++) {
-      cols[jj] = BytesUtil.getColumnIndex(j);
-    }
-
-    Scanner scan = table.getScanner(cols, BytesUtil.getRowIndex(i0), BytesUtil
-        .getRowIndex(i1 + 1));
-    Iterator<RowResult> it = scan.iterator();
-    int i = 0;
-    RowResult rs = null;
-    while (it.hasNext()) {
-      rs = it.next();
-      for (int j = j0, jj = 0; j <= j1; j++, jj++) {
-        result.set(i, jj, rs.get(BytesUtil.getColumnIndex(j)).getValue());
-      }
-      i++;
-    }
-
-    scan.close();
-    return result;
-  }
-
-  /**
-   * Collect Blocks
-   * 
-   * @param path an input path
-   * @param collectionTable the collection table
-   * @param blockNum the number of blocks
-   * @param bool
-   * @throws IOException
-   */
-  public void collectBlocksMapRed(String path, Matrix collectionTable,
-      int blockNum, boolean bool) throws IOException {
-    double blocks = Math.pow(blockNum, 0.5);
-    if (!String.valueOf(blocks).endsWith(".0"))
-      throw new IOException("can't divide.");
-
-    int block_size = (int) blocks;
-    collectionTable.setDimension(block_size, block_size);
-
-    JobConf jobConf = new JobConf(config);
-    jobConf.setJobName("Blocking MR job" + getPath());
-
-    jobConf.setNumMapTasks(config.getNumMapTasks());
-    jobConf.setNumReduceTasks(config.getNumReduceTasks());
-    jobConf.setMapperClass(CollectBlocksMapper.class);
-    jobConf.setInputFormat(VectorInputFormat.class);
-    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
-
-    FileInputFormat.addInputPaths(jobConf, path);
-
-    CollectBlocksMapper.initJob(collectionTable.getPath(), bool, block_size,
-        this.getRows(), this.getColumns(), jobConf);
-
-    JobManager.execute(jobConf);
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scanner;
+import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hama.algebra.BlockMultiplyMap;
+import org.apache.hama.algebra.BlockMultiplyReduce;
+import org.apache.hama.algebra.RowCyclicAdditionMap;
+import org.apache.hama.algebra.RowCyclicAdditionReduce;
+import org.apache.hama.algebra.SIMDMultiplyMap;
+import org.apache.hama.algebra.SIMDMultiplyReduce;
+import org.apache.hama.io.BlockID;
+import org.apache.hama.io.BlockWritable;
+import org.apache.hama.io.DoubleEntry;
+import org.apache.hama.io.VectorUpdate;
+import org.apache.hama.mapred.CollectBlocksMapper;
+import org.apache.hama.mapred.RandomMatrixMap;
+import org.apache.hama.mapred.RandomMatrixReduce;
+import org.apache.hama.mapred.VectorInputFormat;
+import org.apache.hama.util.BytesUtil;
+import org.apache.hama.util.JobManager;
+import org.apache.hama.util.RandomVariable;
+
+/**
+ * This class represents a dense matrix.
+ */
+public class DenseMatrix extends AbstractMatrix implements Matrix {
+  static int tryPathLength = Constants.DEFAULT_PATH_LENGTH;
+  static final String TABLE_PREFIX = DenseMatrix.class.getSimpleName() + "_";
+  static private final Path TMP_DIR = new Path(DenseMatrix.class
+      .getSimpleName()
+      + "_TMP_dir");
+
+  /**
+   * Construct a raw matrix. This just creates a table in HBase, without laying
+   * any schema (such as dimensions: i, j) on it.
+   * 
+   * @param conf configuration object
+   * @throws IOException throw the exception to let the user know what happened,
+   *                 if we didn't create the matrix successfully.
+   */
+  public DenseMatrix(HamaConfiguration conf) throws IOException {
+    setConfiguration(conf);
+
+    tryToCreateTable();
+
+    closed = false;
+  }
+
+  /**
+   * Create/load a matrix aliased as 'matrixName'.
+   * 
+   * @param conf configuration object
+   * @param matrixName the name of the matrix
+   * @param force if force is true, a new matrix will be created no matter
+   *                whether 'matrixName' is aliased to an existing matrix; otherwise,
+   *                just try to load an existing matrix aliased as 'matrixName'.
+   * @throws IOException
+   */
+  public DenseMatrix(HamaConfiguration conf, String matrixName, boolean force)
+      throws IOException {
+    setConfiguration(conf);
+    // if force is set to true:
+    // 1) if this matrixName is aliased to another matrix, we will remove
+    // the old alias, create a new matrix table, and alias to it.
+    
+    // 2) if this matrixName has no alias to another matrix, we will create
+    // a new matrix table, and alias to it.
+    //
+    // if force is set to false, we just try to load an existing matrix aliased
+    // as 'matrixName'.
+
+    boolean existed = hamaAdmin.matrixExists(matrixName);
+
+    if (force) {
+      if (existed) {
+        // remove the old alias
+        hamaAdmin.delete(matrixName);
+      }
+      // create a new matrix table.
+      tryToCreateTable();
+      // save the new alias relationship
+      save(matrixName);
+    } else {
+      if (existed) {
+        // try to get the actual path of the table
+        matrixPath = hamaAdmin.getPath(matrixName);
+        // load the matrix
+        table = new HTable(conf, matrixPath);
+        // increment the reference
+        incrementAndGetRef();
+      } else {
+        throw new IOException("Try to load non-existed matrix alised as "
+            + matrixName);
+      }
+    }
+
+    closed = false;
+  }
+
+  /**
+   * Load a matrix from an existing matrix table whose table name is 'matrixpath'.
+   * It is for internal use by map/reduce.
+   * 
+   * @param conf configuration object
+   * @param matrixpath
+   * @throws IOException
+   * @throws IOException
+   */
+  public DenseMatrix(HamaConfiguration conf, String matrixpath)
+      throws IOException {
+    setConfiguration(conf);
+    matrixPath = matrixpath;
+    // load the matrix
+    table = new HTable(conf, matrixPath);
+    // TODO: for now we don't increment the reference of the table,
+    // since it's for internal use by map/reduce.
+    // if we want to increment the reference of the table,
+    // we don't know where to call Matrix.close in the Add & Mul map/reduce
+    // process to decrement the reference. It seems difficult.
+  }
+
+  /**
+   * Create an m-by-n constant matrix.
+   * 
+   * @param conf configuration object
+   * @param m the number of rows.
+   * @param n the number of columns.
+   * @param s fill the matrix with this scalar value.
+   * @throws IOException throw the exception to let the user know what happened,
+   *                 if we didn't create the matrix successfully.
+   */
+  public DenseMatrix(HamaConfiguration conf, int m, int n, double s)
+      throws IOException {
+    setConfiguration(conf);
+
+    tryToCreateTable();
+
+    closed = false;
+
+    for (int i = 0; i < m; i++) {
+      for (int j = 0; j < n; j++) {
+        set(i, j, s);
+      }
+    }
+
+    setDimension(m, n);
+  }
+
+  /**
+   * Try to create a new matrix with a new random name. The number of tries will
+   * be (Integer.MAX_VALUE - 4) * DEFAULT_TRY_TIMES.
+   * 
+   * @throws IOException
+   */
+  private void tryToCreateTable() throws IOException {
+    int tryTimes = Constants.DEFAULT_TRY_TIMES;
+    do {
+      matrixPath = TABLE_PREFIX + RandomVariable.randMatrixPath(tryPathLength);
+
+      if (!admin.tableExists(matrixPath)) { // no table 'matrixPath' in hbase.
+        tableDesc = new HTableDescriptor(matrixPath);
+        create();
+        return;
+      }
+
+      tryTimes--;
+      if (tryTimes <= 0) { // this loop has exhausted DEFAULT_TRY_TIMES.
+        tryPathLength++;
+        tryTimes = Constants.DEFAULT_TRY_TIMES;
+      }
+
+    } while (tryPathLength <= Constants.DEFAULT_MAXPATHLEN);
+    // exhausted the try times;
+    // throw an IOException to let the user know what happened.
+    throw new IOException("Try too many times to create a table in hbase.");
+  }
+
+  /**
+   * Generate matrix with random elements
+   * 
+   * @param conf configuration object
+   * @param m the number of rows.
+   * @param n the number of columns.
+   * @return an m-by-n matrix with uniformly distributed random elements.
+   * @throws IOException
+   */
+  public static DenseMatrix random(HamaConfiguration conf, int m, int n)
+      throws IOException {
+    DenseMatrix rand = new DenseMatrix(conf);
+    DenseVector vector = new DenseVector();
+    LOG.info("Create the " + m + " * " + n + " random matrix : "
+        + rand.getPath());
+
+    for (int i = 0; i < m; i++) {
+      vector.clear();
+      for (int j = 0; j < n; j++) {
+        vector.set(j, RandomVariable.rand());
+      }
+      rand.setRow(i, vector);
+    }
+
+    rand.setDimension(m, n);
+    return rand;
+  }
+
+  /**
+   * Generate matrix with random elements using Map/Reduce
+   * 
+   * @param conf configuration object
+   * @param m the number of rows.
+   * @param n the number of columns.
+   * @return an m-by-n matrix with uniformly distributed random elements.
+   * @throws IOException
+   */
+  public static DenseMatrix random_mapred(HamaConfiguration conf, int m, int n)
+      throws IOException {
+    DenseMatrix rand = new DenseMatrix(conf);
+    LOG.info("Create the " + m + " * " + n + " random matrix : "
+        + rand.getPath());
+    rand.setDimension(m, n);
+
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("random matrix MR job : " + rand.getPath());
+
+    jobConf.setNumMapTasks(conf.getNumMapTasks());
+    jobConf.setNumReduceTasks(conf.getNumReduceTasks());
+
+    final Path inDir = new Path(TMP_DIR, "in");
+    FileInputFormat.setInputPaths(jobConf, inDir);
+    jobConf.setMapperClass(RandomMatrixMap.class);
+    jobConf.setMapOutputKeyClass(IntWritable.class);
+    jobConf.setMapOutputValueClass(MapWritable.class);
+
+    RandomMatrixReduce.initJob(rand.getPath(), RandomMatrixReduce.class,
+        jobConf);
+    jobConf.setSpeculativeExecution(false);
+    jobConf.set("matrix.column", String.valueOf(n));
+
+    jobConf.setInputFormat(SequenceFileInputFormat.class);
+    final FileSystem fs = FileSystem.get(jobConf);
+    int interval = m / conf.getNumMapTasks();
+
+    // generate an input file for each map task
+    for (int i = 0; i < conf.getNumMapTasks(); ++i) {
+      final Path file = new Path(inDir, "part" + i);
+      final IntWritable start = new IntWritable(i * interval);
+      IntWritable end = null;
+      if ((i + 1) != conf.getNumMapTasks()) {
+        end = new IntWritable(((i * interval) + interval) - 1);
+      } else {
+        end = new IntWritable(m - 1);
+      }
+      final SequenceFile.Writer writer = SequenceFile.createWriter(fs, jobConf,
+          file, IntWritable.class, IntWritable.class, CompressionType.NONE);
+      try {
+        writer.append(start, end);
+      } finally {
+        writer.close();
+      }
+      System.out.println("Wrote input for Map #" + i);
+    }
+
+    JobClient.runJob(jobConf);
+    fs.delete(TMP_DIR, true);
+    return rand;
+  }
+
+  /**
+   * Generate identity matrix
+   * 
+   * @param conf configuration object
+   * @param m the number of rows.
+   * @param n the number of columns.
+   * @return an m-by-n matrix with ones on the diagonal and zeros elsewhere.
+   * @throws IOException
+   */
+  public static Matrix identity(HamaConfiguration conf, int m, int n)
+      throws IOException {
+    Matrix identity = new DenseMatrix(conf);
+    LOG.info("Create the " + m + " * " + n + " identity matrix : "
+        + identity.getPath());
+
+    for (int i = 0; i < m; i++) {
+      DenseVector vector = new DenseVector();
+      for (int j = 0; j < n; j++) {
+        vector.set(j, (i == j ? 1.0 : 0.0));
+      }
+      identity.setRow(i, vector);
+    }
+
+    identity.setDimension(m, n);
+    return identity;
+  }
+
+  /**
+   * Gets the double value of (i, j)
+   * 
+   * @param i ith row of the matrix
+   * @param j jth column of the matrix
+   * @return the value of entry, or zero If entry is null
+   * @throws IOException
+   */
+  public double get(int i, int j) throws IOException {
+    Cell c = table.get(BytesUtil.getRowIndex(i), BytesUtil.getColumnIndex(j));
+    return (c != null) ? BytesUtil.bytesToDouble(c.getValue()) : 0;
+  }
+
+  /**
+   * Gets the vector of row
+   * 
+   * @param i the row index of the matrix
+   * @return the vector of row
+   * @throws IOException
+   */
+  public DenseVector getRow(int i) throws IOException {
+    return new DenseVector(table.getRow(BytesUtil.getRowIndex(i)));
+  }
+  
+  /**
+   * Gets the vector of column
+   * 
+   * @param j the column index of the matrix
+   * @return the vector of column
+   * @throws IOException
+   */
+  public DenseVector getColumn(int j) throws IOException {
+    byte[] columnKey = BytesUtil.getColumnIndex(j);
+    byte[][] c = { columnKey };
+    Scanner scan = table.getScanner(c, HConstants.EMPTY_START_ROW);
+
+    MapWritable trunk = new MapWritable();
+
+    for (RowResult row : scan) {
+      trunk.put(new IntWritable(BytesUtil.bytesToInt(row.getRow())), new DoubleEntry(row
+          .get(columnKey)));
+    }
+
+    return new DenseVector(trunk);
+  }
+
+  /**
+   * Set the row of a matrix to a given vector
+   * 
+   * @param row
+   * @param vector
+   * @throws IOException
+   */
+  public void setRow(int row, Vector vector) throws IOException {
+    VectorUpdate update = new VectorUpdate(row);
+    update.putAll(((DenseVector) vector).getEntries());
+    table.commit(update.getBatchUpdate());
+  }
+
+  /**
+   * Set the column of a matrix to a given vector
+   * 
+   * @param column
+   * @param vector
+   * @throws IOException
+   */
+  public void setColumn(int column, Vector vector) throws IOException {
+    for (int i = 0; i < vector.size(); i++) {
+      VectorUpdate update = new VectorUpdate(i);
+      update.put(column, vector.get(i));
+      table.commit(update.getBatchUpdate());
+    }
+  }
+  
+  /**
+   * A = B + A
+   * 
+   * @param B
+   * @return A
+   * @throws IOException
+   */
+  public Matrix add(Matrix B) throws IOException {
+    Matrix result = new DenseMatrix(config);
+    JobConf jobConf = new JobConf(config);
+    jobConf.setJobName("addition MR job" + result.getPath());
+
+    jobConf.setNumMapTasks(config.getNumMapTasks());
+    jobConf.setNumReduceTasks(config.getNumReduceTasks());
+
+    RowCyclicAdditionMap.initJob(this.getPath(), B.getPath(),
+        RowCyclicAdditionMap.class, IntWritable.class, MapWritable.class,
+        jobConf);
+    RowCyclicAdditionReduce.initJob(result.getPath(),
+        RowCyclicAdditionReduce.class, jobConf);
+
+    JobManager.execute(jobConf, result);
+    return result;
+  }
+
+  /**
+   * A = alpha*B + A
+   * 
+   * @param alpha
+   * @param B
+   * @return A
+   * @throws IOException
+   */
+  public Matrix add(double alpha, Matrix B) throws IOException {
+    Matrix temp = new DenseMatrix(config);
+    temp.set(alpha, B);
+    
+    Matrix result = this.add(temp);
+    return result;
+  }
+  
+  /**
+   * C = A*B using SIMD algorithm
+   * 
+   * @param B
+   * @return C
+   * @throws IOException
+   */
+  public Matrix mult(Matrix B) throws IOException {
+    Matrix result = new DenseMatrix(config);
+
+    JobConf jobConf = new JobConf(config);
+    jobConf.setJobName("multiplication MR job : " + result.getPath());
+
+    jobConf.setNumMapTasks(config.getNumMapTasks());
+    jobConf.setNumReduceTasks(config.getNumReduceTasks());
+
+    SIMDMultiplyMap.initJob(this.getPath(), B.getPath(), SIMDMultiplyMap.class,
+        IntWritable.class, MapWritable.class, jobConf);
+    SIMDMultiplyReduce.initJob(result.getPath(), SIMDMultiplyReduce.class,
+        jobConf);
+    JobManager.execute(jobConf, result);
+    return result;
+  }
+
+  /**
+   * C = A * B using Blocking algorithm
+   * 
+   * @param B
+   * @param blocks the number of blocks
+   * @return C
+   * @throws IOException
+   */
+  public Matrix mult(Matrix B, int blocks) throws IOException {
+    Matrix collectionTable = new DenseMatrix(config);
+    LOG.info("Collect Blocks");
+
+    collectBlocksMapRed(this.getPath(), collectionTable, blocks, true);
+    collectBlocksMapRed(B.getPath(), collectionTable, blocks, false);
+    
+    Matrix result = new DenseMatrix(config);
+
+    JobConf jobConf = new JobConf(config);
+    jobConf.setJobName("multiplication MR job : " + result.getPath());
+
+    jobConf.setNumMapTasks(config.getNumMapTasks());
+    jobConf.setNumReduceTasks(config.getNumReduceTasks());
+
+    BlockMultiplyMap.initJob(collectionTable.getPath(), BlockMultiplyMap.class,
+        BlockID.class, BlockWritable.class, jobConf);
+    BlockMultiplyReduce.initJob(result.getPath(), BlockMultiplyReduce.class,
+        jobConf);
+
+    JobManager.execute(jobConf, result);
+    // Should be collectionTable removed?
+    return result;
+  }
+
+  /**
+   * C = alpha*A*B + C
+   * 
+   * @param alpha
+   * @param B
+   * @param C
+   * @return C
+   * @throws IOException
+   */
+  public Matrix multAdd(double alpha, Matrix B, Matrix C) throws IOException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  /**
+   * Computes the given norm of the matrix
+   * 
+   * @param type
+   * @return norm of the matrix
+   * @throws IOException
+   */
+  public double norm(Norm type) throws IOException {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  /**
+   * Returns type of matrix
+   */
+  public String getType() {
+    return this.getClass().getSimpleName();
+  }
+
+  /**
+   * Returns the sub matrix formed by selecting certain rows and
+   * columns from a bigger matrix. Obtaining the sub matrix is an in-memory operation only.
+   * 
+   * @param i0 the start index of row
+   * @param i1 the end index of row
+   * @param j0 the start index of column
+   * @param j1 the end index of column
+   * @return the sub matrix of matrix
+   * @throws IOException
+   */
+  public SubMatrix subMatrix(int i0, int i1, int j0, int j1) throws IOException {
+    int columnSize = (j1 - j0) + 1;
+    SubMatrix result = new SubMatrix((i1 - i0) + 1, columnSize);
+
+    byte[][] cols = new byte[columnSize][];
+    for (int j = j0, jj = 0; j <= j1; j++, jj++) {
+      cols[jj] = BytesUtil.getColumnIndex(j);
+    }
+
+    Scanner scan = table.getScanner(cols, BytesUtil.getRowIndex(i0), BytesUtil
+        .getRowIndex(i1 + 1));
+    Iterator<RowResult> it = scan.iterator();
+    int i = 0;
+    RowResult rs = null;
+    while (it.hasNext()) {
+      rs = it.next();
+      for (int j = j0, jj = 0; j <= j1; j++, jj++) {
+        result.set(i, jj, rs.get(BytesUtil.getColumnIndex(j)).getValue());
+      }
+      i++;
+    }
+
+    scan.close();
+    return result;
+  }
+
+  /**
+   * Collect Blocks
+   * 
+   * @param path an input path
+   * @param collectionTable the collection table
+   * @param blockNum the number of blocks
+   * @param bool
+   * @throws IOException
+   */
+  public void collectBlocksMapRed(String path, Matrix collectionTable,
+      int blockNum, boolean bool) throws IOException {
+    double blocks = Math.pow(blockNum, 0.5);
+    if (!String.valueOf(blocks).endsWith(".0"))
+      throw new IOException("can't divide.");
+
+    int block_size = (int) blocks;
+    collectionTable.setDimension(block_size, block_size);
+
+    JobConf jobConf = new JobConf(config);
+    jobConf.setJobName("Blocking MR job" + getPath());
+
+    jobConf.setNumMapTasks(config.getNumMapTasks());
+    jobConf.setNumReduceTasks(config.getNumReduceTasks());
+    jobConf.setMapperClass(CollectBlocksMapper.class);
+    jobConf.setInputFormat(VectorInputFormat.class);
+    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
+
+    FileInputFormat.addInputPaths(jobConf, path);
+
+    CollectBlocksMapper.initJob(collectionTable.getPath(), bool, block_size,
+        this.getRows(), this.getColumns(), jobConf);
+
+    JobManager.execute(jobConf);
+  }
+}
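
One detail worth flagging in collectBlocksMapRed: blockNum must be a perfect square,
because the collection table is laid out as a block_size x block_size grid; the code
checks this by taking Math.pow(blockNum, 0.5) and testing whether its decimal string
ends in ".0". The same check in isolation (blockGridSize is a hypothetical helper,
shown only to make the contract explicit):

    import java.io.IOException;

    static int blockGridSize(int blockNum) throws IOException {
      double blocks = Math.pow(blockNum, 0.5);
      if (!String.valueOf(blocks).endsWith(".0")) // e.g. 10 -> "3.1622..." fails
        throw new IOException("can't divide.");
      return (int) blocks; // e.g. 16 -> 4, giving a 4 x 4 grid of blocks
    }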

Modified: incubator/hama/trunk/src/java/org/apache/hama/DenseVector.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/DenseVector.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/DenseVector.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/DenseVector.java Mon Feb 16 04:35:23 2009
@@ -1,210 +1,228 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama;
-
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hama.io.DoubleEntry;
-import org.apache.hama.io.HMapWritable;
-import org.apache.hama.util.BytesUtil;
-import org.apache.log4j.Logger;
-
-/**
- * This class represents a dense vector.
- */
-public class DenseVector extends AbstractVector implements Vector {
-  static final Logger LOG = Logger.getLogger(DenseVector.class);
-
-  public DenseVector() {
-    this(new HMapWritable<Integer, DoubleEntry>());
-  }
-
-  public DenseVector(HMapWritable<Integer, DoubleEntry> m) {
-    this.entries = m;
-  }
-
-  public DenseVector(RowResult row) {
-    this.entries = new HMapWritable<Integer, DoubleEntry>();
-    for (Map.Entry<byte[], Cell> f : row.entrySet()) {
-      this.entries.put(BytesUtil.getColumnIndex(f.getKey()), 
-          new DoubleEntry(f.getValue()));
-    }
-  }
-
-  /**
-   * x = alpha*v + x
-   * 
-   * @param alpha
-   * @param v
-   * @return x = alpha*v + x
-   */
-  public DenseVector add(double alpha, Vector v) {
-    if (alpha == 0)
-      return this;
-
-    for (int i = 0; i < this.size(); i++) {
-      set(i, get(i) + alpha * v.get(i));
-    }
-    return this;
-  }
-
-  /**
-   * x = v + x
-   * 
-   * @param v2
-   * @return x = v + x
-   */
-  public DenseVector add(Vector v2) {
-    if (this.size() == 0) {
-      DenseVector trunk = (DenseVector) v2;
-      this.entries = trunk.entries;
-      return this;
-    }
-
-    for (int i = 0; i < this.size(); i++) {
-      double value = (this.get(i) + v2.get(i));
-
-      this.entries.put(i, new DoubleEntry(value));
-    }
-
-    return this;
-  }
-
-  /**
-   * x dot v
-   * 
-   * @param v
-   * @return x dot v
-   */
-  public double dot(Vector v) {
-    double cosine = 0.0;
-    double q_i, d_i;
-    for (int i = 0; i < Math.min(this.size(), v.size()); i++) {
-      q_i = v.get(i);
-      d_i = this.get(i);
-      cosine += q_i * d_i;
-    }
-    return cosine / (this.getNorm2() * ((DenseVector) v).getNorm2());
-  }
-
-  /**
-   * v = alpha*v 
-   * 
-   * @param alpha
-   * @return v = alpha*v
-   */
-  public Vector scale(double alpha) {
-    for(Map.Entry<Integer, DoubleEntry> e : this.entries.entrySet()) {
-      this.entries.put(e.getKey(), new DoubleEntry(e.getValue().getValue() * alpha));
-    }
-    return this;
-  }
-
-  /**
-   * Computes the given norm of the vector
-   * 
-   * @param type
-   * @return norm of the vector
-   */
-  public double norm(Norm type) {
-    if (type == Norm.One)
-      return getNorm1();
-    else if (type == Norm.Two)
-      return getNorm2();
-    else if (type == Norm.TwoRobust)
-      return getNorm2Robust();
-    else
-      return getNormInf();
-  }
-
-  /**
-   * Sets the vector
-   * 
-   * @param v
-   * @return x = v
-   */
-  public DenseVector set(Vector v) {
-    return new DenseVector(((DenseVector) v).getEntries());
-  }
-
-  public double getNorm1() {
-    double sum = 0.0;
-
-    Set<Integer> keySet = this.entries.keySet();
-    Iterator<Integer> it = keySet.iterator();
-
-    while (it.hasNext()) {
-      sum += get(it.next());
-    }
-
-    return sum;
-  }
-
-  public double getNorm2() {
-    double square_sum = 0.0;
-
-    Set<Integer> keySet = entries.keySet();
-    Iterator<Integer> it = keySet.iterator();
-
-    while (it.hasNext()) {
-      double value = get(it.next());
-      square_sum += value * value;
-    }
-
-    return Math.sqrt(square_sum);
-  }
-
-  public double getNorm2Robust() {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  public double getNormInf() {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  /**
-   * Returns a sub-vector.
-   * 
-   * @param i0 the index of the first element
-   * @param i1 the index of the last element
-   * @return v[i0:i1]
-   */
-  public DenseVector subVector(int i0, int i1) {
-    DenseVector res = new DenseVector();
-    for (int i = i0; i <= i1; i++) {
-      res.set(i, get(i));
-    }
-
-    return res;
-  }
-
-  /**
-   * Clears the entries.
-   */
-  public void clear() {
-    this.entries = null;
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hama.io.DoubleEntry;
+import org.apache.hama.util.BytesUtil;
+import org.apache.log4j.Logger;
+
+/**
+ * This class represents a dense vector.
+ */
+public class DenseVector extends AbstractVector implements Vector {
+  static final Logger LOG = Logger.getLogger(DenseVector.class);
+  public DenseVector() {
+    this(new MapWritable());
+  }
+
+  public DenseVector(MapWritable m) {
+    this.entries = m;
+  }
+
+  public DenseVector(RowResult row) {
+    this.entries = new MapWritable();
+    for (Map.Entry<byte[], Cell> f : row.entrySet()) {
+      this.entries.put(new IntWritable(BytesUtil.getColumnIndex(f.getKey())), 
+          new DoubleEntry(f.getValue()));
+    }
+  }
+
+  public DenseVector(int row, MapWritable m) {
+    this.entries = m;
+    this.entries.put(new Text("row"), new IntWritable(row));
+  }
+  
+  public void setRow(int row) {
+    this.entries.put(new Text("row"), new IntWritable(row));
+  }
+  
+  public int getRow() {
+    return ((IntWritable) this.entries.get(new Text("row"))).get();
+  }
+  
+  /**
+   * x = alpha*v + x
+   * 
+   * @param alpha
+   * @param v
+   * @return x = alpha*v + x
+   */
+  public DenseVector add(double alpha, Vector v) {
+    if (alpha == 0)
+      return this;
+
+    for (int i = 0; i < this.size(); i++) {
+      set(i, get(i) + alpha * v.get(i));
+    }
+    return this;
+  }
+
+  /**
+   * x = v + x
+   * 
+   * @param v2
+   * @return x = v + x
+   */
+  public DenseVector add(Vector v2) {
+    if (this.size() == 0) {
+      DenseVector trunk = (DenseVector) v2;
+      this.entries = trunk.entries;
+      return this;
+    }
+
+    for (int i = 0; i < this.size(); i++) {
+      double value = (this.get(i) + v2.get(i));
+
+      this.entries.put(new IntWritable(i), new DoubleEntry(value));
+    }
+
+    return this;
+  }
+
+  /**
+   * x dot v
+   * 
+   * @param v
+   * @return x dot v
+   */
+  public double dot(Vector v) {
+    double cosine = 0.0;
+    double q_i, d_i;
+    for (int i = 0; i < Math.min(this.size(), v.size()); i++) {
+      q_i = v.get(i);
+      d_i = this.get(i);
+      cosine += q_i * d_i;
+    }
+    return cosine / (this.getNorm2() * ((DenseVector) v).getNorm2());
+  }
+
+  /**
+   * v = alpha*v 
+   * 
+   * @param alpha
+   * @return v = alpha*v
+   */
+  public Vector scale(double alpha) {
+    for(Map.Entry<Writable, Writable> e : this.entries.entrySet()) {
+      this.entries.put(e.getKey(), new DoubleEntry(((DoubleEntry) e.getValue()).getValue() * alpha));
+    }
+    return this;
+  }
+
+  /**
+   * Computes the given norm of the vector
+   * 
+   * @param type
+   * @return norm of the vector
+   */
+  public double norm(Norm type) {
+    if (type == Norm.One)
+      return getNorm1();
+    else if (type == Norm.Two)
+      return getNorm2();
+    else if (type == Norm.TwoRobust)
+      return getNorm2Robust();
+    else
+      return getNormInf();
+  }
+
+  /**
+   * Sets the vector
+   * 
+   * @param v
+   * @return x = v
+   */
+  public DenseVector set(Vector v) {
+    return new DenseVector(((DenseVector) v).getEntries());
+  }
+
+  public double getNorm1() {
+    double sum = 0.0;
+
+    Set<Writable> keySet = this.entries.keySet();
+    Iterator<Writable> it = keySet.iterator();
+
+    while (it.hasNext()) {
+      sum += get(((IntWritable) it.next()).get());
+    }
+
+    return sum;
+  }
+
+  public double getNorm2() {
+    double square_sum = 0.0;
+
+    Set<Writable> keySet = entries.keySet();
+    Iterator<Writable> it = keySet.iterator();
+
+    while (it.hasNext()) {
+      double value = get(((IntWritable) it.next()).get());
+      square_sum += value * value;
+    }
+
+    return Math.sqrt(square_sum);
+  }
+
+  public double getNorm2Robust() {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  public double getNormInf() {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  /**
+   * Returns a sub-vector.
+   * 
+   * @param i0 the index of the first element
+   * @param i1 the index of the last element
+   * @return v[i0:i1]
+   */
+  public DenseVector subVector(int i0, int i1) {
+    DenseVector res = new DenseVector();
+    if (this.entries.containsKey(new Text("row")))
+      res.setRow(this.getRow());
+
+    for (int i = i0; i <= i1; i++) {
+      res.set(i, get(i));
+    }
+
+    return res;
+  }
+
+  /**
+   * Clears the entries.
+   */
+  public void clear() {
+    this.entries = null;
+  }
+}
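
The new DenseVector keeps its entries directly in a hadoop.io.MapWritable keyed by IntWritable, with a reserved Text("row") key carrying the row index. One caveat worth noting: dot(Vector) divides by both 2-norms, so as committed it returns the cosine similarity rather than the plain inner product. Below is a minimal, illustrative round-trip sketch (not part of this commit; class names are hypothetical, and DoubleEntry is assumed to have the usual no-arg Writable constructor):

    // Illustrative only: round-trips a DenseVector through Hadoop
    // serialization, using MapWritable itself as the wire format.
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.MapWritable;
    import org.apache.hama.DenseVector;

    public class DenseVectorRoundTrip {
      public static void main(String[] args) throws IOException {
        DenseVector v = new DenseVector();
        v.set(0, 1.5);
        v.set(1, -2.0);

        // MapWritable is now the wire format; no custom VectorWritable.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        v.getEntries().write(new DataOutputStream(bos));

        MapWritable copy = new MapWritable();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bos.toByteArray())));

        DenseVector w = new DenseVector(copy);
        System.out.println(w.get(0) + ", " + w.get(1)); // 1.5, -2.0
      }
    }

Because MapWritable records non-standard value classes in its serialized header, the custom DoubleEntry values should deserialize without any extra registration.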

Modified: incubator/hama/trunk/src/java/org/apache/hama/Vector.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/Vector.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/Vector.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/Vector.java Mon Feb 16 04:35:23 2009
@@ -1,147 +1,147 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama;
-
-import java.util.Iterator;
-
-import org.apache.hama.io.DoubleEntry;
-
-/**
- * Basic vector interface.
- */
-public interface Vector {
-
-  /**
-   * Size of the vector
-   * 
-   * @return size of the vector
-   */
-  public int size();
-
-  /**
-   * Gets the value of index
-   * 
-   * @param index
-   * @return v(index)
-   */
-  public double get(int index);
-
-  /**
-   * Sets the value of index
-   * 
-   * @param index
-   * @param value
-   */
-  public void set(int index, double value);
-
-  /**
-   * Sets the vector
-   * 
-   * @param v
-   * @return x = v
-   */
-  public Vector set(Vector v);
-
-  /**
-   * Adds the value to v(index)
-   * 
-   * @param index
-   * @param value
-   */
-  public void add(int index, double value);
-
-  /**
-   * x = alpha*v + x
-   * 
-   * @param alpha
-   * @param v
-   * @return x = alpha*v + x
-   */
-  public Vector add(double alpha, Vector v);
-
-  /**
-   * x = v + x
-   * 
-   * @param v
-   * @return x = v + x
-   */
-  public Vector add(Vector v);
-
-  /**
-   * x dot v
-   * 
-   * @param v
-   * @return x dot v
-   */
-  public double dot(Vector v);
-
-  /**
-   * v = alpha*v 
-   * 
-   * @param alpha
-   * @return v = alpha*v
-   */
-  public Vector scale(double alpha);
-  
-  /**
-   * Returns a sub-vector.
-   * 
-   * @param i0 the index of the first element
-   * @param i1 the index of the last element
-   * @return v[i0:i1]
-   */
-  public Vector subVector( int i0, int i1 ); 
-  
-  /**
-   * Computes the given norm of the vector
-   * 
-   * @param type
-   * @return norm of the vector
-   */
-  public double norm(Norm type);
-
-  /**
-   * Supported vector-norms.
-   */
-  enum Norm {
-
-    /** Sum of the absolute values of the entries */
-    One,
-
-    /** The root of sum of squares */
-    Two,
-
-    /**
-     * As the 2 norm may overflow, an overflow resistant version is also
-     * available. Note that it may be slower.
-     */
-    TwoRobust,
-
-    /** Largest entry in absolute value */
-    Infinity
-  }
-
-  /**
-   * Returns an iterator
-   * 
-   * @return iterator
-   */
-  public Iterator<DoubleEntry> iterator();
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama;
+
+import java.util.Iterator;
+
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Basic vector interface.
+ */
+public interface Vector {
+
+  /**
+   * Size of the vector
+   * 
+   * @return size of the vector
+   */
+  public int size();
+
+  /**
+   * Gets the value of index
+   * 
+   * @param index
+   * @return v(index)
+   */
+  public double get(int index);
+
+  /**
+   * Sets the value of index
+   * 
+   * @param index
+   * @param value
+   */
+  public void set(int index, double value);
+
+  /**
+   * Sets the vector
+   * 
+   * @param v
+   * @return x = v
+   */
+  public Vector set(Vector v);
+
+  /**
+   * Adds the value to v(index)
+   * 
+   * @param index
+   * @param value
+   */
+  public void add(int index, double value);
+
+  /**
+   * x = alpha*v + x
+   * 
+   * @param alpha
+   * @param v
+   * @return x = alpha*v + x
+   */
+  public Vector add(double alpha, Vector v);
+
+  /**
+   * x = v + x
+   * 
+   * @param v
+   * @return x = v + x
+   */
+  public Vector add(Vector v);
+
+  /**
+   * x dot v
+   * 
+   * @param v
+   * @return x dot v
+   */
+  public double dot(Vector v);
+
+  /**
+   * v = alpha*v 
+   * 
+   * @param alpha
+   * @return v = alpha*v
+   */
+  public Vector scale(double alpha);
+  
+  /**
+   * Returns a sub-vector.
+   * 
+   * @param i0 the index of the first element
+   * @param i1 the index of the last element
+   * @return v[i0:i1]
+   */
+  public Vector subVector(int i0, int i1);
+  
+  /**
+   * Computes the given norm of the vector
+   * 
+   * @param type
+   * @return norm of the vector
+   */
+  public double norm(Norm type);
+
+  /**
+   * Supported vector-norms.
+   */
+  enum Norm {
+
+    /** Sum of the absolute values of the entries */
+    One,
+
+    /** The root of sum of squares */
+    Two,
+
+    /**
+     * As the 2 norm may overflow, an overflow resistant version is also
+     * available. Note that it may be slower.
+     */
+    TwoRobust,
+
+    /** Largest entry in absolute value */
+    Infinity
+  }
+
+  /**
+   * Returns an iterator
+   * 
+   * @return iterator
+   */
+  public Iterator<Writable> iterator();
+}
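
With this change, iterator() yields raw Writables instead of DoubleEntry, so callers now cast back themselves. Also note that DenseVector's getNorm1 sums raw values, so Norm.One matches its "sum of the absolute values" contract only for non-negative entries. A small hypothetical usage sketch (assuming, as the old Iterator<DoubleEntry> signature implies, that the iterator walks the entry values):

    // Hypothetical caller of the revised interface; not part of this commit.
    import java.util.Iterator;

    import org.apache.hadoop.io.Writable;
    import org.apache.hama.DenseVector;
    import org.apache.hama.Vector;
    import org.apache.hama.io.DoubleEntry;

    public class NormDemo {
      public static void main(String[] args) {
        Vector v = new DenseVector();
        v.set(0, 3.0);
        v.set(1, 4.0);

        // Norm.Two is the Euclidean norm: sqrt(3*3 + 4*4) = 5.0
        System.out.println(v.norm(Vector.Norm.Two));

        // iterator() now yields raw Writables; the caller casts.
        Iterator<Writable> it = v.iterator();
        while (it.hasNext()) {
          System.out.println(((DoubleEntry) it.next()).getValue());
        }
      }
    }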

Modified: incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionMap.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionMap.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionMap.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionMap.java Mon Feb 16 04:35:23 2009
@@ -1,76 +1,78 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.algebra;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MapReduceBase;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hama.Constants;
-import org.apache.hama.DenseMatrix;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.io.VectorWritable;
-import org.apache.hama.mapred.VectorInputFormat;
-import org.apache.log4j.Logger;
-
-public class RowCyclicAdditionMap extends MapReduceBase implements
-Mapper<IntWritable, VectorWritable, IntWritable, VectorWritable> {
-  static final Logger LOG = Logger.getLogger(RowCyclicAdditionMap.class);
-  protected DenseMatrix matrix_b;
-  public static final String MATRIX_B = "hama.addition.matrix.b";
-
-  public void configure(JobConf job) {
-    try {
-      matrix_b = new DenseMatrix(new HamaConfiguration(job), job.get(MATRIX_B, ""));
-    } catch (IOException e) {
-      LOG.warn("Load matrix_b failed : " + e.getMessage());
-    }
-  }
-
-  public static void initJob(String matrix_a, String matrix_b,
-      Class<RowCyclicAdditionMap> map, Class<IntWritable> outputKeyClass,
-      Class<VectorWritable> outputValueClass, JobConf jobConf) {
-
-    jobConf.setMapOutputValueClass(outputValueClass);
-    jobConf.setMapOutputKeyClass(outputKeyClass);
-    jobConf.setMapperClass(map);
-    jobConf.set(MATRIX_B, matrix_b);
-
-
-    jobConf.setInputFormat(VectorInputFormat.class);
-    FileInputFormat.addInputPaths(jobConf, matrix_a);
-    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
-  }
-
-  @Override
-  public void map(IntWritable key, VectorWritable value,
-      OutputCollector<IntWritable, VectorWritable> output, Reporter reporter)
-      throws IOException {
-
-    output.collect(key, new VectorWritable(key.get(), 
-        matrix_b.getRow(key.get()).add(value.getDenseVector())));
-
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hama.Constants;
+import org.apache.hama.DenseMatrix;
+import org.apache.hama.DenseVector;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.mapred.VectorInputFormat;
+import org.apache.log4j.Logger;
+
+public class RowCyclicAdditionMap extends MapReduceBase implements
+Mapper<IntWritable, MapWritable, IntWritable, MapWritable> {
+  static final Logger LOG = Logger.getLogger(RowCyclicAdditionMap.class);
+  protected DenseMatrix matrix_b;
+  public static final String MATRIX_B = "hama.addition.matrix.b";
+
+  public void configure(JobConf job) {
+    try {
+      matrix_b = new DenseMatrix(new HamaConfiguration(job), job.get(MATRIX_B, ""));
+    } catch (IOException e) {
+      LOG.warn("Load matrix_b failed : " + e.getMessage());
+    }
+  }
+
+  public static void initJob(String matrix_a, String matrix_b,
+      Class<RowCyclicAdditionMap> map, Class<IntWritable> outputKeyClass,
+      Class<MapWritable> outputValueClass, JobConf jobConf) {
+
+    jobConf.setMapOutputValueClass(outputValueClass);
+    jobConf.setMapOutputKeyClass(outputKeyClass);
+    jobConf.setMapperClass(map);
+    jobConf.set(MATRIX_B, matrix_b);
+
+    jobConf.setInputFormat(VectorInputFormat.class);
+    FileInputFormat.addInputPaths(jobConf, matrix_a);
+    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
+  }
+
+  @Override
+  public void map(IntWritable key, MapWritable value,
+      OutputCollector<IntWritable, MapWritable> output, Reporter reporter)
+      throws IOException {
+    
+    DenseVector a = matrix_b.getRow(key.get());
+    DenseVector b = new DenseVector(value);
+    DenseVector c = a.add(b);
+    output.collect(key, c.getEntries());
+
+  }
+}
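
The mapper body now works on plain MapWritable rows: wrap the incoming row of A in a DenseVector, add the matching row of B fetched via matrix_b.getRow(), and emit the result's raw entries. The same per-row computation, sketched outside MapReduce with made-up values:

    // The mapper's per-row work in isolation; not part of this commit.
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hama.DenseVector;

    public class RowAdditionSketch {
      public static void main(String[] args) {
        DenseVector a = new DenseVector();   // stands in for a row of B
        a.set(0, 1.0);
        a.set(1, 2.0);

        DenseVector b = new DenseVector();   // stands in for the incoming row of A
        b.set(0, 10.0);
        b.set(1, 20.0);

        // Same shape as the map() body: add, then ship the raw entries.
        MapWritable out = a.add(b).getEntries();
        System.out.println(new DenseVector(out).get(1)); // 22.0
      }
    }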

Modified: incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionReduce.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionReduce.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionReduce.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/algebra/RowCyclicAdditionReduce.java Mon Feb 16 04:35:23 2009
@@ -1,67 +1,67 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.algebra;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MapReduceBase;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hama.io.VectorUpdate;
-import org.apache.hama.io.VectorWritable;
-import org.apache.hama.mapred.VectorOutputFormat;
-
-public class RowCyclicAdditionReduce extends MapReduceBase implements
-    Reducer<IntWritable, VectorWritable, IntWritable, VectorUpdate> {
-
-  /**
-   * Use this before submitting a TableReduce job. It will appropriately set up
-   * the JobConf.
-   * 
-   * @param table
-   * @param reducer
-   * @param job
-   */
-  public static void initJob(String table, Class<RowCyclicAdditionReduce> reducer,
-      JobConf job) {
-    job.setOutputFormat(VectorOutputFormat.class);
-    job.setReducerClass(reducer);
-    job.set(VectorOutputFormat.OUTPUT_TABLE, table);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setOutputValueClass(BatchUpdate.class);
-  }
-
-  @Override
-  public void reduce(IntWritable key, Iterator<VectorWritable> values,
-      OutputCollector<IntWritable, VectorUpdate> output, Reporter reporter)
-      throws IOException {
-
-    VectorUpdate update = new VectorUpdate(key.get());
-    update.putAll(values.next().entrySet());
-
-    output.collect(key, update);
-  }
-
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama.algebra;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hama.io.VectorUpdate;
+import org.apache.hama.mapred.VectorOutputFormat;
+
+public class RowCyclicAdditionReduce extends MapReduceBase implements
+    Reducer<IntWritable, MapWritable, IntWritable, VectorUpdate> {
+
+  /**
+   * Use this before submitting a TableReduce job. It will appropriately set up
+   * the JobConf.
+   * 
+   * @param table
+   * @param reducer
+   * @param job
+   */
+  public static void initJob(String table, Class<RowCyclicAdditionReduce> reducer,
+      JobConf job) {
+    job.setOutputFormat(VectorOutputFormat.class);
+    job.setReducerClass(reducer);
+    job.set(VectorOutputFormat.OUTPUT_TABLE, table);
+    job.setOutputKeyClass(IntWritable.class);
+    job.setOutputValueClass(BatchUpdate.class);
+  }
+
+  @Override
+  public void reduce(IntWritable key, Iterator<MapWritable> values,
+      OutputCollector<IntWritable, VectorUpdate> output, Reporter reporter)
+      throws IOException {
+
+    VectorUpdate update = new VectorUpdate(key.get());
+    update.putAll(values.next());
+
+    output.collect(key, update);
+  }
+
+}
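
Because the map side already emits exactly one summed row per key, the reducer consumes only the first MapWritable from the iterator and wraps it in a VectorUpdate (a BatchUpdate wrapper) for the output table. A hedged driver-side sketch of wiring the two initJob helpers together (not part of this commit; table names and the driver class are placeholders, and a no-arg HamaConfiguration constructor is assumed):

    // Hypothetical driver for the row-cyclic addition job.
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hama.HamaConfiguration;
    import org.apache.hama.algebra.RowCyclicAdditionMap;
    import org.apache.hama.algebra.RowCyclicAdditionReduce;

    public class AdditionJobSketch {
      public static void main(String[] args) throws Exception {
        JobConf jobConf = new JobConf(new HamaConfiguration(), AdditionJobSketch.class);
        jobConf.setJobName("row cyclic addition");

        // "MatrixA" + "MatrixB" -> "MatrixC"; all table names hypothetical.
        RowCyclicAdditionMap.initJob("MatrixA", "MatrixB",
            RowCyclicAdditionMap.class, IntWritable.class, MapWritable.class, jobConf);
        RowCyclicAdditionReduce.initJob("MatrixC",
            RowCyclicAdditionReduce.class, jobConf);

        JobClient.runJob(jobConf);
      }
    }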

Modified: incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyMap.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyMap.java?rev=744798&r1=744797&r2=744798&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyMap.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/algebra/SIMDMultiplyMap.java Mon Feb 16 04:35:23 2009
@@ -1,84 +1,85 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.algebra;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MapReduceBase;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hama.Constants;
-import org.apache.hama.DenseMatrix;
-import org.apache.hama.DenseVector;
-import org.apache.hama.HamaConfiguration;
-import org.apache.hama.Matrix;
-import org.apache.hama.io.VectorWritable;
-import org.apache.hama.mapred.VectorInputFormat;
-import org.apache.log4j.Logger;
-
-/**
- * SIMD version
- */
-public class SIMDMultiplyMap extends MapReduceBase implements
-Mapper<IntWritable, VectorWritable, IntWritable, VectorWritable> {
-  static final Logger LOG = Logger.getLogger(SIMDMultiplyMap.class);
-  protected Matrix matrix_b;
-  public static final String MATRIX_B = "hama.multiplication.matrix.b";
-  public static final DenseVector sum = new DenseVector();;
-  
-  public void configure(JobConf job) {
-    try {
-      matrix_b = new DenseMatrix(new HamaConfiguration(job), job.get(MATRIX_B, ""));
-    } catch (IOException e) {
-      LOG.warn("Load matrix_b failed : " + e.getMessage());
-    }
-  }
-
-  public static void initJob(String matrix_a, String matrix_b,
-      Class<SIMDMultiplyMap> map, Class<IntWritable> outputKeyClass,
-      Class<VectorWritable> outputValueClass, JobConf jobConf) {
-
-    jobConf.setMapOutputValueClass(outputValueClass);
-    jobConf.setMapOutputKeyClass(outputKeyClass);
-    jobConf.setMapperClass(map);
-    jobConf.set(MATRIX_B, matrix_b);
-
-    jobConf.setInputFormat(VectorInputFormat.class);
-    FileInputFormat.addInputPaths(jobConf, matrix_a);
-    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
-  }
-
-  @Override
-  public void map(IntWritable key, VectorWritable value,
-      OutputCollector<IntWritable, VectorWritable> output, Reporter reporter)
-      throws IOException {
-    sum.clear();
-
-    for(int i = 0; i < value.size(); i++) {
-      sum.add(matrix_b.getRow(i).scale(value.get(i)));
-    }
-    
-    output.collect(key, new VectorWritable(key.get(), sum));
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hama.Constants;
+import org.apache.hama.DenseMatrix;
+import org.apache.hama.DenseVector;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.Matrix;
+import org.apache.hama.io.DoubleEntry;
+import org.apache.hama.mapred.VectorInputFormat;
+import org.apache.log4j.Logger;
+
+/**
+ * SIMD version
+ */
+public class SIMDMultiplyMap extends MapReduceBase implements
+Mapper<IntWritable, MapWritable, IntWritable, MapWritable> {
+  static final Logger LOG = Logger.getLogger(SIMDMultiplyMap.class);
+  protected Matrix matrix_b;
+  public static final String MATRIX_B = "hama.multiplication.matrix.b";
+  public static final DenseVector sum = new DenseVector();
+  
+  public void configure(JobConf job) {
+    try {
+      matrix_b = new DenseMatrix(new HamaConfiguration(job), job.get(MATRIX_B, ""));
+    } catch (IOException e) {
+      LOG.warn("Load matrix_b failed : " + e.getMessage());
+    }
+  }
+
+  public static void initJob(String matrix_a, String matrix_b,
+      Class<SIMDMultiplyMap> map, Class<IntWritable> outputKeyClass,
+      Class<MapWritable> outputValueClass, JobConf jobConf) {
+
+    jobConf.setMapOutputValueClass(outputValueClass);
+    jobConf.setMapOutputKeyClass(outputKeyClass);
+    jobConf.setMapperClass(map);
+    jobConf.set(MATRIX_B, matrix_b);
+
+    jobConf.setInputFormat(VectorInputFormat.class);
+    FileInputFormat.addInputPaths(jobConf, matrix_a);
+    jobConf.set(VectorInputFormat.COLUMN_LIST, Constants.COLUMN);
+  }
+
+  @Override
+  public void map(IntWritable key, MapWritable value,
+      OutputCollector<IntWritable, MapWritable> output, Reporter reporter)
+      throws IOException {
+    sum.clear();
+
+    for(int i = 0; i < value.size(); i++) {
+      sum.add(matrix_b.getRow(i).scale(((DoubleEntry) value.get(new IntWritable(i))).getValue()));
+    }
+    
+    output.collect(key, sum.getEntries());
+  }
+}
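
Each map() call computes row k of A*B as a linear combination of B's rows, weighted by the entries of A's row k. The statically shared sum buffer is reused across calls, which is safe under the classic single-threaded mapred task model but would not survive a multithreaded map runner. A numeric sketch of one such call, with made-up values:

    // Not part of this commit. With A's row k = [2, 3] and B the 2x2
    // identity, row k of A*B is 2 * B.row(0) + 3 * B.row(1) = [2, 3].
    import org.apache.hama.DenseVector;

    public class SimdRowSketch {
      public static void main(String[] args) {
        DenseVector b0 = new DenseVector();  // row 0 of B
        b0.set(0, 1.0);
        b0.set(1, 0.0);
        DenseVector b1 = new DenseVector();  // row 1 of B
        b1.set(0, 0.0);
        b1.set(1, 1.0);

        DenseVector sum = new DenseVector();
        sum.add(b0.scale(2.0));              // scale mutates in place, as in the mapper
        sum.add(b1.scale(3.0));

        System.out.println(sum.get(0) + ", " + sum.get(1)); // 2.0, 3.0
      }
    }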


