Subject: svn commit: r1446832 [2/2] - in /hadoop/common/branches/HDFS-347/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ hadoop-mapreduce-client/hadoop-mapreduce-...
Date: Sat, 16 Feb 2013 01:12:20 -0000
From: atm@apache.org
To: mapreduce-commits@hadoop.apache.org

Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java Sat Feb 16 01:12:07 2013
@@ -105,11 +105,11 @@ public class TaskAttemptFinishedEvent i
   public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the task attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(attemptId.toString());
+    return attemptId;
   }
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(taskType.toString());
+    return taskType;
   }
   /** Get the task status */
   public String getTaskStatus() { return taskStatus.toString(); }

Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java Sat Feb 16 01:12:07 2013
@@ -95,14 +95,10 @@ public class TaskFinishedEvent implement
   }
   /** Get task id */
-  public TaskID getTaskId() { return TaskID.forName(taskid.toString()); }
+  public TaskID getTaskId() { return taskid; }
   /** Get successful task attempt id */
   public TaskAttemptID getSuccessfulTaskAttemptId() {
-    if(successfulAttemptId != null)
-    {
-      return TaskAttemptID.forName(successfulAttemptId.toString());
-    }
-    return null;
+    return successfulAttemptId;
   }
   /** Get the task finish time */
   public long getFinishTime() { return finishTime; }
@@ -110,7 +106,7 @@ public class TaskFinishedEvent implement
   public Counters getCounters() { return counters; }
   /** Get task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(taskType.toString());
+    return taskType;
  }
   /** Get task status */
   public String getTaskStatus() { return status.toString(); }
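The two jobhistory changes above share one pattern: the getters serialized an already-typed, immutable field to a String and re-parsed it on every call. Returning the stored field directly drops the parsing cost, the extra allocation, and a possible parse failure. A minimal standalone sketch of the before/after, using a hypothetical Identifier class rather than Hadoop's TaskAttemptID:

public class GetterRoundTrip {

  // Stand-in for TaskAttemptID: immutable, with a String round-trip parser.
  static final class Identifier {
    private final int id;

    Identifier(int id) { this.id = id; }

    static Identifier forName(String s) {
      // Re-parsing allocates a new object and can throw on bad input.
      return new Identifier(Integer.parseInt(s));
    }

    @Override
    public String toString() { return Integer.toString(id); }
  }

  private final Identifier attemptId = new Identifier(42);

  // Before: serialize then re-parse on every call.
  public Identifier getAttemptIdRoundTrip() {
    return Identifier.forName(attemptId.toString());
  }

  // After (the pattern in this commit): hand back the field directly.
  // Safe here because the identifier is immutable.
  public Identifier getAttemptId() {
    return attemptId;
  }

  public static void main(String[] args) {
    GetterRoundTrip g = new GetterRoundTrip();
    System.out.println(g.getAttemptId());          // 42
    System.out.println(g.getAttemptIdRoundTrip()); // 42, but via a parse
  }
}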
Propchange: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1440578-1446830

Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java Sat Feb 16 01:12:07 2013
@@ -29,6 +29,7 @@ import static org.apache.hadoop.yarn.web
 
 import java.util.Collection;
 
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -110,13 +111,17 @@ public class HsTaskPage extends HsView {
           th(".note", "Note");
 
       TBODY<TABLE<Hamlet>> tbody = headRow._()._().tbody();
-      for (TaskAttempt ta : getTaskAttempts()) {
+
+      // Write all the data into a JavaScript array of arrays for JQuery
+      // DataTables to display
+      StringBuilder attemptsTableData = new StringBuilder("[\n");
+
+      for (TaskAttempt ta : getTaskAttempts()) {
         String taid = MRApps.toString(ta.getID());
 
         String nodeHttpAddr = ta.getNodeHttpAddress();
         String containerIdString = ta.getAssignedContainerID().toString();
         String nodeIdString = ta.getAssignedContainerMgrAddress();
-        String nodeRackName = ta.getNodeRackName();
+        String nodeRackName = ta.getNodeRackName();
 
         long attemptStartTime = ta.getLaunchTime();
         long shuffleFinishTime = -1;
@@ -138,58 +143,43 @@ public class HsTaskPage extends HsView {
         long attemptElapsed =
             Times.elapsed(attemptStartTime, attemptFinishTime, false);
         int sortId = ta.getID().getId() + (ta.getID().getTaskId().getId() * 10000);
-
-        TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
-        TD<TR<TBODY<TABLE<Hamlet>>>> td = row.td();
-
-        td.br().$title(String.valueOf(sortId))._(). // sorting
-            _(taid)._().td(ta.getState().toString()).td().a(".nodelink",
-                HttpConfig.getSchemePrefix()+ nodeHttpAddr,
-                nodeRackName + "/" + nodeHttpAddr);
-        td._();
-        row.td().
-          a(".logslink",
-            url("logs", nodeIdString, containerIdString, taid, app.getJob()
-                .getUserName()), "logs")._();
-
-        row.td().
-          br().$title(String.valueOf(attemptStartTime))._().
-            _(Times.format(attemptStartTime))._();
+
+        attemptsTableData.append("[\"")
+        .append(sortId + " ").append(taid).append("\",\"")
+        .append(ta.getState().toString()).append("\",\"")
+
+        .append("<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>")
+        .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
+
+        .append("<a class='logslink' href='").append(url("logs", nodeIdString, containerIdString, taid, app.getJob().getUserName()))
+        .append("'>logs</a>\",\"")
+
+        .append(attemptStartTime).append("\",\"");
 
         if(type == TaskType.REDUCE) {
-          row.td().
-            br().$title(String.valueOf(shuffleFinishTime))._().
-              _(Times.format(shuffleFinishTime))._();
-          row.td().
-            br().$title(String.valueOf(sortFinishTime))._().
-              _(Times.format(sortFinishTime))._();
+          attemptsTableData.append(shuffleFinishTime).append("\",\"")
+          .append(sortFinishTime).append("\",\"");
         }
-        row.
-          td().
-            br().$title(String.valueOf(attemptFinishTime))._().
-              _(Times.format(attemptFinishTime))._();
-
+        attemptsTableData.append(attemptFinishTime).append("\",\"");
+
         if(type == TaskType.REDUCE) {
-          row.td().
-            br().$title(String.valueOf(elapsedShuffleTime))._().
-              _(formatTime(elapsedShuffleTime))._();
-          row.td().
-            br().$title(String.valueOf(elapsedSortTime))._().
-              _(formatTime(elapsedSortTime))._();
-          row.td().
-            br().$title(String.valueOf(elapsedReduceTime))._().
-              _(formatTime(elapsedReduceTime))._();
+          attemptsTableData.append(elapsedShuffleTime).append("\",\"")
+          .append(elapsedSortTime).append("\",\"")
+          .append(elapsedReduceTime).append("\",\"");
         }
-
-        row.
-          td().
-            br().$title(String.valueOf(attemptElapsed))._().
-              _(formatTime(attemptElapsed))._().
-          td(".note", Joiner.on('\n').join(ta.getDiagnostics()));
-        row._();
+        attemptsTableData.append(attemptElapsed).append("\",\"")
+        .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+          Joiner.on('\n').join(ta.getDiagnostics())))).append("\"],\n");
       }
-
-
+      //Remove the last comma and close off the array of arrays
+      if(attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
+        attemptsTableData.delete(attemptsTableData.length()-2, attemptsTableData.length()-1);
+      }
+      attemptsTableData.append("]");
+      html.script().$type("text/javascript").
+      _("var attemptsTableData=" + attemptsTableData)._();
+
       TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
       footRow.
         th().input("search_init").$type(InputType.text).
@@ -237,10 +227,6 @@ public class HsTaskPage extends HsView {
       footRow._()._()._();
   }
 
-  private String formatTime(long elapsed) {
-    return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
-  }
-
   /**
    * @return true if this is a valid request else false.
    */
@@ -292,24 +278,34 @@ public class HsTaskPage extends HsView {
       TaskId taskID = MRApps.toTaskID($(TASK_ID));
       type = taskID.getTaskType();
     }
-    StringBuilder b = tableInit().
-      append(",aoColumnDefs:[");
-
-    b.append("{'sType':'title-numeric', 'aTargets': [ 0");
-    if(type == TaskType.REDUCE) {
-      b.append(", 7, 8, 9, 10");
-    } else { //MAP
-      b.append(", 5");
-    }
-    b.append(" ] }]");
-
-    // Sort by id upon page load
-    b.append(", aaSorting: [[0, 'asc']]");
-
-    b.append("}");
-    return b.toString();
+    StringBuilder b = tableInit()
+      .append(", 'aaData': attemptsTableData")
+      .append(", bDeferRender: true")
+      .append(", bProcessing: true")
+      .append("\n,aoColumnDefs:[\n")
+
+      //logs column should not filterable (it includes container ID which may pollute searches)
+      .append("\n{'aTargets': [ 3 ]")
+      .append(", 'bSearchable': false }")
+
+      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
+      .append(", 'mRender': parseHadoopAttemptID }")
+
+      .append("\n, {'sType':'numeric', 'aTargets': [ 4, 5")
+      //Column numbers are different for maps and reduces
+      .append(type == TaskType.REDUCE ? ", 6, 7" : "")
+      .append(" ], 'mRender': renderHadoopDate }")
+
+      .append("\n, {'sType':'numeric', 'aTargets': [")
+      .append(type == TaskType.REDUCE ? "8, 9, 10, 11" : "6")
+      .append(" ], 'mRender': renderHadoopElapsedTime }]")
+
+      // Sort by id upon page load
+      .append("\n, aaSorting: [[0, 'asc']]")
+      .append("}");
+    return b.toString();
   }
-
+
   private String attemptsPostTableInit() {
     return "var asInitVals = new Array();\n" +
       "$('tfoot input').keyup( function () \n{"+
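The HsTaskPage change replaces server-side Hamlet row rendering with one JavaScript array-of-arrays handed to jQuery DataTables, and it double-escapes free-form diagnostics (HTML first, then JavaScript) before embedding them in the page. A standalone sketch of that serialization step, assuming commons-lang's StringEscapeUtils on the classpath; the class name and sample rows are illustrative:

import org.apache.commons.lang.StringEscapeUtils;

// Each row becomes a JS array literal; free-form text is HTML-escaped and
// then JS-escaped so it can't break out of the string or inject markup.
public class AttemptsTableDataSketch {
  public static void main(String[] args) {
    String[][] rows = {
        {"attempt_1_0001_m_000000_0", "SUCCEEDED", "ok"},
        {"attempt_1_0001_m_000001_0", "FAILED", "diag: <b>bad</b> 'node'"},
    };

    StringBuilder data = new StringBuilder("[\n");
    for (String[] row : rows) {
      data.append("[\"")
          .append(row[0]).append("\",\"")
          .append(row[1]).append("\",\"")
          // Escape HTML, then escape for a JS string literal.
          .append(StringEscapeUtils.escapeJavaScript(
              StringEscapeUtils.escapeHtml(row[2])))
          .append("\"],\n");
    }
    // Remove the trailing comma and close off the array of arrays,
    // exactly as the diff above does.
    if (data.charAt(data.length() - 2) == ',') {
      data.delete(data.length() - 2, data.length() - 1);
    }
    data.append("]");

    // Emitted into the page as an inline script variable.
    System.out.println("var attemptsTableData=" + data);
  }
}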
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Sat Feb 16 01:12:07 2013
@@ -140,6 +140,7 @@ public class HsTasksBlock extends HtmlBl
         attemptFinishTime = ta.getFinishTime();
         attemptElapsed = ta.getElapsedTime();
       }
+
       tasksTableData.append("[\"")
       .append("<a href='" + url("task", tid) + "'>")
       .append(tid).append("</a>\",\"")
@@ -205,9 +206,4 @@ public class HsTasksBlock extends HtmlBl
       footRow._()._()._();
   }
-
-  private String formatTime(long elapsed) {
-    return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
-  }
-
 }
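The private formatTime helper is deleted from both HsTaskPage and HsTasksBlock: elapsed times now ship to the browser as raw longs (with -1 meaning "not applicable") and are formatted client-side by the renderHadoopElapsedTime hook named in the table config. A hedged Java sketch of the rule that moved, so the sentinel convention stays visible; StringUtils.formatTime is Hadoop's real helper, while the surrounding class is illustrative:

import org.apache.hadoop.util.StringUtils;

// The formatting rule that moved client-side: negative elapsed values are
// sentinels (e.g. shuffle time for a map task) and render as "N/A".
public class ElapsedTimeFormatSketch {
  static String formatTime(long elapsed) {
    return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
  }

  public static void main(String[] args) {
    System.out.println(formatTime(-1));     // N/A
    System.out.println(formatTime(65000));  // roughly "1mins, 5sec"
  }
}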
"8, 9, 10, 11" : "6") + .append(" ], 'mRender': renderHadoopElapsedTime }]") + + // Sort by id upon page load + .append("\n, aaSorting: [[0, 'asc']]") + .append("}"); + return b.toString(); } - + private String attemptsPostTableInit() { return "var asInitVals = new Array();\n" + "$('tfoot input').keyup( function () \n{"+ Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1446832&r1=1446831&r2=1446832&view=diff ============================================================================== --- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original) +++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Sat Feb 16 01:12:07 2013 @@ -140,6 +140,7 @@ public class HsTasksBlock extends HtmlBl attemptFinishTime = ta.getFinishTime(); attemptElapsed = ta.getElapsedTime(); } + tasksTableData.append("[\"") .append("") .append(tid).append("\",\"") @@ -205,9 +206,4 @@ public class HsTasksBlock extends HtmlBl footRow._()._()._(); } - - private String formatTime(long elapsed) { - return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed); - } - } Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java?rev=1446832&r1=1446831&r2=1446832&view=diff ============================================================================== --- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java (original) +++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java Sat Feb 16 01:12:07 2013 @@ -67,33 +67,25 @@ public class HsTasksPage extends HsView type = MRApps.taskType(symbol); } StringBuilder b = tableInit(). - append(", 'aaData': tasksTableData"); - b.append(", bDeferRender: true"); - b.append(", bProcessing: true"); + append(", 'aaData': tasksTableData") + .append(", bDeferRender: true") + .append(", bProcessing: true") - b.append("\n, aoColumnDefs: [\n"); - b.append("{'sType':'numeric', 'aTargets': [ 0 ]"); - b.append(", 'mRender': parseHadoopID }"); + .append("\n, aoColumnDefs: [\n") + .append("{'sType':'numeric', 'aTargets': [ 0 ]") + .append(", 'mRender': parseHadoopID }") - b.append(", {'sType':'numeric', 'aTargets': [ 4"); - if(type == TaskType.REDUCE) { - b.append(", 9, 10, 11, 12"); - } else { //MAP - b.append(", 7"); - } - b.append(" ], 'mRender': renderHadoopElapsedTime }"); + .append(", {'sType':'numeric', 'aTargets': [ 4") + .append(type == TaskType.REDUCE ? 
", 9, 10, 11, 12" : ", 7") + .append(" ], 'mRender': renderHadoopElapsedTime }") - b.append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5"); - if(type == TaskType.REDUCE) { - b.append(", 6, 7, 8"); - } else { //MAP - b.append(", 6"); - } - b.append(" ], 'mRender': renderHadoopDate }]"); + .append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5") + .append(type == TaskType.REDUCE ? ", 6, 7, 8" : ", 6") + .append(" ], 'mRender': renderHadoopDate }]") // Sort by id upon page load - b.append("\n, aaSorting: [[0, 'asc']]"); - b.append("}"); + .append("\n, aaSorting: [[0, 'asc']]") + .append("}"); return b.toString(); } Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java?rev=1446832&r1=1446831&r2=1446832&view=diff ============================================================================== --- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java (original) +++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java Sat Feb 16 01:12:07 2013 @@ -136,32 +136,47 @@ public class TestKeyValueTextInputFormat } public void testUTF8() throws Exception { - LineReader in = makeStream("abcd\u20acbdcd\u20ac"); - Text line = new Text(); - in.readLine(line); - assertEquals("readLine changed utf8 characters", - "abcd\u20acbdcd\u20ac", line.toString()); - in = makeStream("abc\u200axyz"); - in.readLine(line); - assertEquals("split on fake newline", "abc\u200axyz", line.toString()); + LineReader in = null; + + try { + in = makeStream("abcd\u20acbdcd\u20ac"); + Text line = new Text(); + in.readLine(line); + assertEquals("readLine changed utf8 characters", + "abcd\u20acbdcd\u20ac", line.toString()); + in = makeStream("abc\u200axyz"); + in.readLine(line); + assertEquals("split on fake newline", "abc\u200axyz", line.toString()); + } finally { + if (in != null) { + in.close(); + } + } } public void testNewLines() throws Exception { - LineReader in = makeStream("a\nbb\n\nccc\rdddd\r\neeeee"); - Text out = new Text(); - in.readLine(out); - assertEquals("line1 length", 1, out.getLength()); - in.readLine(out); - assertEquals("line2 length", 2, out.getLength()); - in.readLine(out); - assertEquals("line3 length", 0, out.getLength()); - in.readLine(out); - assertEquals("line4 length", 3, out.getLength()); - in.readLine(out); - assertEquals("line5 length", 4, out.getLength()); - in.readLine(out); - assertEquals("line5 length", 5, out.getLength()); - assertEquals("end of file", 0, in.readLine(out)); + LineReader in = null; + try { + in = makeStream("a\nbb\n\nccc\rdddd\r\neeeee"); + Text out = new Text(); + in.readLine(out); + assertEquals("line1 length", 1, out.getLength()); + in.readLine(out); + assertEquals("line2 length", 2, out.getLength()); + in.readLine(out); + assertEquals("line3 length", 0, out.getLength()); + in.readLine(out); + assertEquals("line4 length", 3, out.getLength()); + in.readLine(out); + assertEquals("line5 length", 4, out.getLength()); + in.readLine(out); + assertEquals("line5 length", 
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java Sat Feb 16 01:12:07 2013
@@ -98,6 +98,7 @@ public class TestLineRecordReader extend
       InterruptedException, ClassNotFoundException {
     Configuration conf = new Configuration();
     conf.set("textinputformat.record.delimiter", "\t\n");
+    conf.setInt("mapreduce.job.maps", 1);
     FileSystem localFs = FileSystem.getLocal(conf);
     // cleanup
     localFs.delete(workDir, true);

Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java Sat Feb 16 01:12:07 2013
@@ -82,6 +82,7 @@ public class TestChainMapReduce extends
 
     JobConf conf = createJobConf();
     conf.setBoolean("localFS", isLocalFS());
+    conf.setInt("mapreduce.job.maps", 1);
 
     cleanFlags(conf);
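Both of these tests pin the map count to one, so the framework does not split the small test input across multiple map tasks and reorder the output. A hedged sketch of just the configuration step; the job body is omitted and only the conf calls come from the diff:

import org.apache.hadoop.conf.Configuration;

// Pinning the number of map tasks in a test configuration so a tiny input
// is processed by exactly one mapper, keeping output layout deterministic.
public class SingleMapperConfSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("textinputformat.record.delimiter", "\t\n"); // delimiter under test
    conf.setInt("mapreduce.job.maps", 1);                 // one mapper -> stable output
    System.out.println(conf.getInt("mapreduce.job.maps", -1)); // prints 1
  }
}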
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestClientProtocolProviderImpls.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestClientProtocolProviderImpls.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestClientProtocolProviderImpls.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestClientProtocolProviderImpls.java Sat Feb 16 01:12:07 2013
@@ -42,24 +42,10 @@ public class TestClientProtocolProviderI
     }
 
-    try {
-      conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
-      conf.set(JTConfig.JT_IPC_ADDRESS, "127.0.0.1:0");
-
-      new Cluster(conf);
-      fail("Cluster with Local Framework name should use local JT address");
-    } catch (IOException e) {
-
-    }
-
-    try {
-      conf.set(JTConfig.JT_IPC_ADDRESS, "local");
-      Cluster cluster = new Cluster(conf);
-      assertTrue(cluster.getClient() instanceof LocalJobRunner);
-      cluster.close();
-    } catch (IOException e) {
-
-    }
+    conf.set(MRConfig.FRAMEWORK_NAME, "local");
+    Cluster cluster = new Cluster(conf);
+    assertTrue(cluster.getClient() instanceof LocalJobRunner);
+    cluster.close();
   }
 
   @Test
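The simplified assertion reflects that selecting the local framework by name is now sufficient; no JobTracker IPC address is consulted. A hedged sketch of what the rewritten test asserts; it must sit in the org.apache.hadoop.mapreduce package because Cluster.getClient() is package-private (which is also where the real test lives), and the class name here is illustrative:

// Assumes a MapReduce client classpath; MRConfig.FRAMEWORK_NAME is the
// real constant for "mapreduce.framework.name".
package org.apache.hadoop.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.LocalJobRunner;

public class LocalFrameworkSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // "local" selects the local ClientProtocolProvider; no JT address needed.
    conf.set(MRConfig.FRAMEWORK_NAME, "local");

    Cluster cluster = new Cluster(conf);
    try {
      // getClient() exposes the chosen ClientProtocol implementation.
      System.out.println(cluster.getClient() instanceof LocalJobRunner); // true
    } finally {
      cluster.close();
    }
  }
}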