oozie-commits mailing list archives

From: asasv...@apache.org
Subject: oozie git commit: OOZIE-2978 Remove code that handles Pig versions before 0.8 (asasvari)
Date: Thu, 03 Aug 2017 11:31:10 GMT
Repository: oozie
Updated Branches:
  refs/heads/master 4f3afcaca -> 7d59ce9ba


OOZIE-2978 Remove code that handles Pig versions before 0.8 (asasvari)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/7d59ce9b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/7d59ce9b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/7d59ce9b

Branch: refs/heads/master
Commit: 7d59ce9ba0b79a66169b6d8f1c4b941a381e549a
Parents: 4f3afca
Author: Attila Sasvari <asasvari@cloudera.com>
Authored: Thu Aug 3 13:29:54 2017 +0200
Committer: Attila Sasvari <asasvari@cloudera.com>
Committed: Thu Aug 3 13:30:30 2017 +0200

----------------------------------------------------------------------
 docs/src/site/twiki/DG_QuickStart.twiki         |   2 +-
 docs/src/site/twiki/ENG_Building.twiki          |   2 +-
 release-log.txt                                 |   1 +
 .../org/apache/oozie/action/hadoop/PigMain.java |  82 ++----
 .../oozie/action/hadoop/PigMainWithOldAPI.java  | 295 -------------------
 .../action/hadoop/TestPigMainWithOldAPI.java    | 152 ----------
 6 files changed, 34 insertions(+), 500 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 9cd8a9e..daeecdb 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -17,7 +17,7 @@ For detailed install and configuration instructions refer to [[AG_Install][Oozie
    * Java JDK 1.7+
    * Maven 3.0.1+
    * Hadoop 2.6.0+
-   * Pig 0.7+
+   * Pig 0.10.1+
 
 JDK commands (java, javac) must be in the command path.
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index 56e191d..ee772f7 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -12,7 +12,7 @@
    * Java JDK 1.7+
    * [[http://maven.apache.org/][Maven 3.0.1+]]
    * [[http://hadoop.apache.org/core/releases.html][Hadoop 2.6.0+]]
-   * [[http://hadoop.apache.org/pig/releases.html][Pig 0.7+]]
+   * [[http://hadoop.apache.org/pig/releases.html][Pig 0.10.1+]]
 
 JDK commands (java, javac) must be in the command path.
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 4023346..7b6a4a7 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.0.0 release (trunk - unreleased)
 
+OOZIE-2978 Remove code that handles Pig versions before 0.8 (asasvari)
 OOZIE-2852 Remove simple-json dependency from oozie sharelib (Artem Ervits via gezapeti)
 OOZIE-3021 Error on job or SLA event listening: WARN logs instead of DEBUG (andras.piros via asasvari)
 OOZIE-2931 Fix warnings during build for "no @param for" (Artem Ervits via gezapeti)

http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 4882981..13ee04f 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -231,8 +231,7 @@ public class PigMain extends LauncherMain {
         System.out.println("<<< Invocation of Pig command completed <<<");
         System.out.println();
 
-        // For embedded python or for version of pig lower than 0.8, pig stats are not supported.
-        // So retrieving hadoop Ids here
+        // For embedded python, pig stats are not supported. So retrieving hadoop Ids here
         File file = new File(System.getProperty(EXTERNAL_CHILD_IDS));
         if (!file.exists()) {
             writeExternalChildIDs(logFile, PIG_JOB_IDS_PATTERNS, "Pig");
@@ -317,8 +316,7 @@ public class PigMain extends LauncherMain {
     }
 
     /**
-     * Runs the pig script using PigRunner API if version 0.8 or above. Embedded
-     * pig within python is also supported.
+     * Runs the pig script using PigRunner. Embedded pig within python is also supported.
      *
      * @param args pig command line arguments
      * @param pigLog pig log file
@@ -327,60 +325,42 @@ public class PigMain extends LauncherMain {
      * @throws Exception
      */
     protected void runPigJob(String[] args, String pigLog, boolean resetSecurityManager, boolean retrieveStats) throws Exception {
-        // running as from the command line
-        boolean pigRunnerExists = true;
-
-        try {
-            Class.forName("org.apache.pig.PigRunner");
-        }
-        catch (ClassNotFoundException ex) {
-            pigRunnerExists = false;
+        PigStats stats = PigRunner.run(args, null);
+        String jobIds = getHadoopJobIds(stats);
+        if (jobIds != null && !jobIds.isEmpty()) {
+            System.out.println("Hadoop Job IDs executed by Pig: " + jobIds);
+            File f = new File(System.getProperty(EXTERNAL_CHILD_IDS));
+            writeExternalData(jobIds, f);
         }
 
-        if (pigRunnerExists) {
-            System.out.println("Run pig script using PigRunner.run() for Pig version 0.8+");
-            PigStats stats = PigRunner.run(args, null);
-            String jobIds = getHadoopJobIds(stats);
-            if (jobIds != null && !jobIds.isEmpty()) {
-                System.out.println("Hadoop Job IDs executed by Pig: " + jobIds);
-                File f = new File(System.getProperty(EXTERNAL_CHILD_IDS));
-                writeExternalData(jobIds, f);
+        if (!stats.isSuccessful()) {
+            if (pigLog != null) {
+                handleError(pigLog);
             }
-            // isSuccessful is the API from 0.9 supported by both PigStats and
-            // EmbeddedPigStats
-            if (!stats.isSuccessful()) {
-                if (pigLog != null) {
-                    handleError(pigLog);
-                }
-                throw new LauncherMainException(PigRunner.ReturnCode.FAILURE);
+            throw new LauncherMainException(PigRunner.ReturnCode.FAILURE);
+        }
+        else {
+            // If pig command is ran with just the "version" option, then
+            // return
+            if (resetSecurityManager) {
+                return;
             }
-            else {
-                // If pig command is ran with just the "version" option, then
-                // return
-                if (resetSecurityManager) {
-                    return;
-                }
-                // Retrieve stats only if user has specified in workflow
-                // configuration
-                if (retrieveStats) {
-                    ActionStats pigStats;
-                    String JSONString;
-                    try {
-                        pigStats = new OoziePigStats(stats);
-                        JSONString = pigStats.toJSON();
-                    } catch (UnsupportedOperationException uoe) {
-                        throw new UnsupportedOperationException(
-                                "Pig stats are not supported for this type of operation", uoe);
-                    }
-                    File f = new File(System.getProperty(EXTERNAL_ACTION_STATS));
-                    writeExternalData(JSONString, f);
+            // Retrieve stats only if user has specified in workflow
+            // configuration
+            if (retrieveStats) {
+                ActionStats pigStats;
+                String JSONString;
+                try {
+                    pigStats = new OoziePigStats(stats);
+                    JSONString = pigStats.toJSON();
+                } catch (UnsupportedOperationException uoe) {
+                    throw new UnsupportedOperationException(
+                            "Pig stats are not supported for this type of operation", uoe);
                 }
+                File f = new File(System.getProperty(EXTERNAL_ACTION_STATS));
+                writeExternalData(JSONString, f);
             }
         }
-        else {
-                System.out.println("Run pig script using Main.main() for Pig version before 0.8");
-                Main.main(args);
-        }
     }
 
     // write external data(stats, hadoopIds) to the file which will be read by the LauncherAMUtils

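Condensed, the PigRunner code path that PigMain keeps after this change looks roughly like the sketch below. The class name is made up for illustration, and Oozie-specific pieces (handleError(), the EXTERNAL_CHILD_IDS file, LauncherMainException) are omitted; only the calls shown in the hunk above are assumed.

    import org.apache.pig.PigRunner;
    import org.apache.pig.tools.pigstats.PigStats;

    // Minimal sketch of the PigRunner-based execution (Pig 0.8+ API) now used unconditionally.
    public class PigRunnerSketch {
        public static void main(String[] args) throws Exception {
            // e.g. args = { "-file", "script.pig", "-logfile", "pig.log" }
            PigStats stats = PigRunner.run(args, null);
            if (!stats.isSuccessful()) {
                // PigMain throws LauncherMainException(PigRunner.ReturnCode.FAILURE) at this point
                throw new RuntimeException("Pig run failed, return code " + PigRunner.ReturnCode.FAILURE);
            }
        }
    }
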
http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
deleted file mode 100644
index 7884cdd..0000000
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import org.apache.pig.Main;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.FileOutputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Properties;
-import java.util.Set;
-
-public class PigMainWithOldAPI extends LauncherMain {
-    private static final Set<String> DISALLOWED_PIG_OPTIONS = new HashSet<>();
-
-    static {
-        DISALLOWED_PIG_OPTIONS.add("-4");
-        DISALLOWED_PIG_OPTIONS.add("-log4jconf");
-        DISALLOWED_PIG_OPTIONS.add("-e");
-        DISALLOWED_PIG_OPTIONS.add("-execute");
-        DISALLOWED_PIG_OPTIONS.add("-f");
-        DISALLOWED_PIG_OPTIONS.add("-file");
-        DISALLOWED_PIG_OPTIONS.add("-l");
-        DISALLOWED_PIG_OPTIONS.add("-logfile");
-        DISALLOWED_PIG_OPTIONS.add("-r");
-        DISALLOWED_PIG_OPTIONS.add("-dryrun");
-        DISALLOWED_PIG_OPTIONS.add("-x");
-        DISALLOWED_PIG_OPTIONS.add("-exectype");
-        DISALLOWED_PIG_OPTIONS.add("-P");
-        DISALLOWED_PIG_OPTIONS.add("-propertyFile");
-    }
-
-    public static void main(String[] args) throws Exception {
-        run(PigMainWithOldAPI.class, args);
-    }
-
-    protected void run(String[] args) throws Exception {
-        System.out.println();
-        System.out.println("Oozie Pig action configuration");
-        System.out.println("=================================================================");
-
-        // loading action conf prepared by Oozie
-        Configuration actionConf = new Configuration(false);
-
-        String actionXml = System.getProperty("oozie.action.conf.xml");
-
-        if (actionXml == null) {
-            throw new RuntimeException("Missing Java System Property [oozie.action.conf.xml]");
-        }
-        if (!new File(actionXml).exists()) {
-            throw new RuntimeException("Action Configuration XML file [" + actionXml + "] does not exist");
-        }
-
-        actionConf.addResource(new Path("file:///", actionXml));
-
-        Properties pigProperties = new Properties();
-        for (Map.Entry<String, String> entry : actionConf) {
-            pigProperties.setProperty(entry.getKey(), entry.getValue());
-        }
-
-        //propagate delegation related props from launcher job to Pig job
-        String jobTokenFile = getFilePathFromEnv("HADOOP_TOKEN_FILE_LOCATION");
-        if (jobTokenFile != null) {
-            pigProperties.setProperty("mapreduce.job.credentials.binary", jobTokenFile);
-            System.out.println("------------------------");
-            System.out.println("Setting env property for mapreduce.job.credentials.binary to:"
-                    + jobTokenFile);
-            System.out.println("------------------------");
-            System.setProperty("mapreduce.job.credentials.binary", jobTokenFile);
-        }
-        else {
-            System.out.println("Non-kerberos execution");
-        }
-
-        try (OutputStream os = new FileOutputStream("pig.properties")) {
-            pigProperties.store(os, "");
-        }
-
-        System.out.println();
-        System.out.println("pig.properties content:");
-        System.out.println("------------------------");
-        pigProperties.store(System.out, "");
-        System.out.flush();
-        System.out.println("------------------------");
-        System.out.println();
-
-        List<String> arguments = new ArrayList<>();
-        String script = actionConf.get("oozie.pig.script");
-
-        if (script == null) {
-            throw new RuntimeException("Action Configuration does not have [oozie.pig.script] property");
-        }
-
-        if (!new File(script).exists()) {
-            throw new RuntimeException("Error: Pig script file [" + script + "] does not exist");
-        }
-
-        System.out.println("Pig script [" + script + "] content: ");
-        System.out.println("------------------------");
-
-        String line;
-        try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(script), "UTF-8"))) {
-            line = br.readLine();
-            while (line != null) {
-                System.out.println(line);
-                line = br.readLine();
-            }
-        }
-
-        System.out.println("------------------------");
-        System.out.println();
-
-        arguments.add("-file");
-        arguments.add(script);
-        String[] params = ActionUtils.getStrings(actionConf, "oozie.pig.params");
-        for (String param : params) {
-            arguments.add("-param");
-            arguments.add(param);
-        }
-
-        String hadoopJobId = System.getProperty("oozie.launcher.job.id");
-        if (hadoopJobId == null) {
-            throw new RuntimeException("Launcher Hadoop Job ID system property not set");
-        }
-
-        String logFile = new File("pig-oozie-" + hadoopJobId + ".log").getAbsolutePath();
-
-        String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
-        String rootLogLevel = actionConf.get("oozie.action." + LauncherAMUtils.ROOT_LOGGER_LEVEL, "INFO");
-
-        // append required PIG properties to the default hadoop log4j file
-        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
-        log4jProperties.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
-        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-        log4jProperties.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
-        log4jProperties.setProperty("log4j.appender.B.file", logFile);
-        log4jProperties.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
-        log4jProperties.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-
-        String localProps = new File("piglog4j.properties").getAbsolutePath();
-        try (OutputStream os1 = new FileOutputStream(localProps)) {
-            log4jProperties.store(os1, "");
-        }
-
-        arguments.add("-log4jconf");
-        arguments.add(localProps);
-
-        // print out current directory
-        File localDir = new File(localProps).getParentFile();
-        System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
-
-        String pigLog = "pig-" + hadoopJobId + ".log";
-        arguments.add("-logfile");
-        arguments.add(pigLog);
-
-        String[] pigArgs = ActionUtils.getStrings(actionConf, "oozie.pig.args");
-        for (String pigArg : pigArgs) {
-            if (DISALLOWED_PIG_OPTIONS.contains(pigArg)) {
-                throw new RuntimeException("Error: Pig argument " + pigArg + " is not supported");
-            }
-            arguments.add(pigArg);
-        }
-
-        System.out.println("Pig command arguments :");
-        for (String arg : arguments) {
-            System.out.println("             " + arg);
-        }
-
-        System.out.println("=================================================================");
-        System.out.println();
-        System.out.println(">>> Invoking Pig command line now >>>");
-        System.out.println();
-        System.out.flush();
-        LauncherAM.LauncherSecurityManager launcherSecurityManager = new LauncherAM.LauncherSecurityManager();
-        launcherSecurityManager.enable();
-        try {
-            System.out.println();
-            runPigJob(new String[] { "-version" });
-        }
-        catch (SecurityException ex) {
-            launcherSecurityManager.reset();
-        }
-        System.out.println();
-        System.out.flush();
-
-        try {
-            runPigJob(arguments.toArray(new String[arguments.size()]));
-        }
-        catch (SecurityException ex) {
-            if (launcherSecurityManager.getExitInvoked()) {
-                if (launcherSecurityManager.getExitCode() != 0) {
-                    System.err.println();
-                    System.err.println("Pig logfile dump:");
-                    System.err.println();
-                    try {
-                        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(pigLog),
-                                "UTF-8"))) {
-                            line = reader.readLine();
-                            while (line != null) {
-                                System.err.println(line);
-                                line = reader.readLine();
-                            }
-                        }
-                    }
-                    catch (FileNotFoundException e) {
-                        System.err.println("pig log file: " + pigLog + "  not found.");
-                    }
-                    throw ex;
-                }
-            }
-        }
-
-        System.out.println();
-        System.out.println("<<< Invocation of Pig command completed <<<");
-        System.out.println();
-
-        // harvesting and recording Hadoop Job IDs
-        Properties jobIds = getHadoopJobIds(logFile);
-        File file = new File(System.getProperty(OUTPUT_PROPERTIES));
-
-        try (OutputStream os = new FileOutputStream(file)) {
-            jobIds.store(os, "");
-        }
-
-        System.out.println(" Hadoop Job IDs executed by Pig: " + jobIds.getProperty(HADOOP_JOBS));
-        System.out.println();
-    }
-
-    protected void runPigJob(String[] args) throws Exception {
-        // running as from the command line
-        Main.main(args);
-    }
-
-    private static final String JOB_ID_LOG_PREFIX = "HadoopJobId: ";
-
-    protected Properties getHadoopJobIds(String logFile) throws IOException {
-        Properties props = new Properties();
-        StringBuffer sb = new StringBuffer(100);
-        if (!new File(logFile).exists()) {
-            System.err.println("pig log file: " + logFile + "  not present. Therefore no Hadoop jobids found");
-            props.setProperty(HADOOP_JOBS, "");
-        }
-        else {
-            try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile), "UTF-8"))) {
-                String line = br.readLine();
-                String separator = "";
-                while (line != null) {
-                    if (line.contains(JOB_ID_LOG_PREFIX)) {
-                        int jobIdStarts = line.indexOf(JOB_ID_LOG_PREFIX) + JOB_ID_LOG_PREFIX.length();
-                        String jobId = line.substring(jobIdStarts);
-                        int jobIdEnds = jobId.indexOf(" ");
-                        if (jobIdEnds > -1) {
-                            jobId = jobId.substring(0, jobId.indexOf(" "));
-                        }
-                        sb.append(separator).append(jobId);
-                        separator = ",";
-                    }
-                    line = br.readLine();
-                }
-            }
-
-            props.setProperty(HADOOP_JOBS, sb.toString());
-        }
-        return props;
-    }
-}

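For contrast with the PigStats-based retrieval in the PigMain hunk above, the removed PigMainWithOldAPI harvested Hadoop job ids by scanning the Pig log for the "HadoopJobId: " marker. A condensed sketch of that parsing follows; class and method names are illustrative, not the deleted code verbatim.

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.InputStreamReader;
    import java.util.ArrayList;
    import java.util.List;

    // Collect every token that follows "HadoopJobId: " in the Pig log file.
    // With PigRunner, the ids come from PigStats instead (getHadoopJobIds(stats)).
    public class PigLogJobIdSketch {
        public static List<String> jobIdsFromLog(String logFile) throws Exception {
            List<String> ids = new ArrayList<>();
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(logFile), "UTF-8"))) {
                String line;
                while ((line = br.readLine()) != null) {
                    int at = line.indexOf("HadoopJobId: ");
                    if (at >= 0) {
                        String id = line.substring(at + "HadoopJobId: ".length());
                        int end = id.indexOf(' ');
                        ids.add(end > -1 ? id.substring(0, end) : id);
                    }
                }
            }
            return ids;
        }
    }
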
http://git-wip-us.apache.org/repos/asf/oozie/blob/7d59ce9b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigMainWithOldAPI.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigMainWithOldAPI.java b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigMainWithOldAPI.java
deleted file mode 100644
index d5a5797..0000000
--- a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigMainWithOldAPI.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.oozie.action.hadoop.MainTestCase;
-import org.apache.oozie.action.hadoop.PigMainWithOldAPI;
-import org.apache.oozie.action.hadoop.SharelibUtils;
-import org.apache.oozie.test.XFsTestCase;
-import org.apache.oozie.util.XConfiguration;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.PrintStream;
-import java.io.Writer;
-import java.net.URL;
-import java.security.Permission;
-import java.util.Properties;
-import java.util.concurrent.Callable;
-
-import static groovy.util.GroovyTestCase.assertEquals;
-
-
-/**
- * Test PigMainWithOldAPI class should run a Pig script and write results to
- * output
- */
-public class TestPigMainWithOldAPI extends XFsTestCase implements Callable<Void> {
-    private SecurityManager SECURITY_MANAGER;
-
-    protected void setUp() throws Exception {
-        super.setUp();
-        SECURITY_MANAGER = System.getSecurityManager();
-        PigTestCase.resetPigStats();
-    }
-
-    protected void tearDown() throws Exception {
-        System.setSecurityManager(SECURITY_MANAGER);
-        PigTestCase.resetPigStats();
-        super.tearDown();
-    }
-
-    public void testPigScript() throws Exception {
-        MainTestCase.execute(getTestUser(), this);
-    }
-
-    @Override
-    public Void call() throws Exception {
-        FileSystem fs = getFileSystem();
-
-        Path script = new Path(getTestCaseDir(), "script.pig");
-        Writer w = new FileWriter(script.toString());
-        String pigScript = "set job.name 'test'\n set debug on\n A = load '$IN' using PigStorage(':');\n"
-                + "B = foreach A generate $0 as id;\n store B into '$OUT' USING PigStorage();";
-        w.write(pigScript);
-        w.close();
-
-        Path inputDir = new Path(getFsTestCaseDir(), "input");
-        fs.mkdirs(inputDir);
-        Writer writer = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
-        writer.write("hello");
-        writer.close();
-
-        Path outputDir = new Path(getFsTestCaseDir(), "output");
-
-        XConfiguration jobConfiguration = new XConfiguration();
-        XConfiguration.copy(createJobConf(), jobConfiguration);
-
-        jobConfiguration.set("user.name", getTestUser());
-        jobConfiguration.set("group.name", getTestGroup());
-        jobConfiguration.setInt("mapred.map.tasks", 1);
-        jobConfiguration.setInt("mapred.map.max.attempts", 1);
-        jobConfiguration.setInt("mapred.reduce.max.attempts", 1);
-        jobConfiguration.set("oozie.pig.script", script.toString());
-        // option to specify whether stats should be stored or not
-
-        SharelibUtils.addToDistributedCache("pig", fs, getFsTestCaseDir(), jobConfiguration);
-
-        String[] params = { "IN=" + inputDir.toUri().getPath(), "OUT=" + outputDir.toUri().getPath() };
-        ActionUtils.setStrings(jobConfiguration, "oozie.pig.params", params);
-        String[] args = { "-v" };
-        ActionUtils.setStrings(jobConfiguration, "oozie.pig.args", args);
-
-        File actionXml = new File(getTestCaseDir(), "action.xml");
-        OutputStream os = new FileOutputStream(actionXml);
-        jobConfiguration.writeXml(os);
-        os.close();
-
-        File jobIdsFile = new File(getTestCaseDir(), "jobIds.properties");
-
-        setSystemProperty("oozie.launcher.job.id", "" + System.currentTimeMillis());
-        setSystemProperty("oozie.action.conf.xml", actionXml.getAbsolutePath());
-        setSystemProperty("oozie.action.output.properties", jobIdsFile.getAbsolutePath());
-
-        URL url = Thread.currentThread().getContextClassLoader().getResource("PigMain.txt");
-        File classPathDir = new File(url.getPath()).getParentFile();
-        assertTrue(classPathDir.exists());
-        Properties props = jobConfiguration.toProperties();
-        assertEquals(props.getProperty("oozie.pig.args.size"), "1");
-        File pigProps = new File(classPathDir, "pig.properties");
-
-        String user = System.getProperty("user.name");
-        ByteArrayOutputStream data = new ByteArrayOutputStream();
-        PrintStream oldPrintStream = System.out;
-        System.setOut(new PrintStream(data));
-
-
-        try {
-            Writer wr = new FileWriter(pigProps);
-            props.store(wr, "");
-            wr.close();
-            PigMainWithOldAPI.main(null);
-        }
-        finally {
-            pigProps.delete();
-            System.setProperty("user.name", user);
-            System.setOut(oldPrintStream);
-        }
-
-        assertTrue(jobIdsFile.exists());
-        Properties prop = new Properties();
-        prop.load(new FileReader(jobIdsFile));
-        String jobId = prop.getProperty("hadoopJobs");
-        assertTrue(data.toString().contains(jobId));
-        assertTrue(data.toString().contains("Success!"));
-
-        return null;
-    }
-
-}

