knox-commits mailing list archives

From kmin...@apache.org
Subject git commit: Additional site documentation and sample cleanup.
Date Wed, 20 Mar 2013 21:09:16 GMT
Updated Branches:
  refs/heads/master f162485af -> e2a62cc8a


Additional site documentation and sample cleanup.


Project: http://git-wip-us.apache.org/repos/asf/incubator-knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-knox/commit/e2a62cc8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-knox/tree/e2a62cc8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-knox/diff/e2a62cc8

Branch: refs/heads/master
Commit: e2a62cc8a7810543fb0f4f7ca6f38d7e11b8bb4c
Parents: f162485
Author: Kevin Minder <kevin.minder@hortonworks.com>
Authored: Wed Mar 20 17:09:11 2013 -0400
Committer: Kevin Minder <kevin.minder@hortonworks.com>
Committed: Wed Mar 20 17:09:11 2013 -0400

----------------------------------------------------------------------
 build.xml                                          |    2 +-
 gateway-release/ext/README                         |    1 +
 gateway-release/lib/README                         |    1 +
 gateway-release/pom.xml                            |    2 +-
 gateway-release/samples/Example.groovy             |   37 ---
 gateway-release/samples/ExamplePutFile.groovy      |   37 +++
 gateway-release/samples/ExampleSubmitJob.groovy    |   61 ++++
 .../samples/ExampleSubmitWorkflow.groovy           |   92 ++++++
 gateway-release/samples/SmokeTestJob.groovy        |   61 ----
 gateway-release/samples/SmokeTestWorkflow.groovy   |   92 ------
 gateway-server/pom.xml                             |    4 +
 .../src/main/resources/META-INF/launcher.cfg       |   16 +
 gateway-site/src/site/markdown/client.md.vm        |   22 +-
 gateway-site/src/site/markdown/examples.md.vm      |  226 ++++++++++++++-
 .../src/site/markdown/getting-started.md.vm        |    8 +-
 gateway-site/src/site/markdown/sandbox.md          |   62 ++++
 gateway-site/src/site/site.xml                     |    1 +
 17 files changed, 518 insertions(+), 207 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 24b9027..7c414a7 100644
--- a/build.xml
+++ b/build.xml
@@ -94,7 +94,7 @@
         </exec>
         <exec executable="${svn.cmd}" dir="gateway-site/target/site">
             <arg value="add"/>
-            <arg value="-q"/>
+            <arg value="--force"/>
             <arg value="."/>
         </exec>
         <exec executable="${svn.cmd}" dir="gateway-site/target/site">
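
For reference, `svn add --force` recurses into an already-versioned directory and schedules any unversioned files it finds, instead of erroring out the way a plain `svn add .` would, which suits re-adding a freshly regenerated site. A minimal illustration, mirroring the Ant exec above:

    # Run from gateway-site/target/site: schedule everything new for addition,
    # even though the directory itself is already under version control.
    svn add --force .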

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/ext/README
----------------------------------------------------------------------
diff --git a/gateway-release/ext/README b/gateway-release/ext/README
new file mode 100644
index 0000000..9eb0ca5
--- /dev/null
+++ b/gateway-release/ext/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE JARS AND CLASSES CONTAINING CUSTOM EXTENSIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/lib/README
----------------------------------------------------------------------
diff --git a/gateway-release/lib/README b/gateway-release/lib/README
new file mode 100644
index 0000000..39cee63
--- /dev/null
+++ b/gateway-release/lib/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS RESERVED FOR USE BY FUTURE SYSTEM JARS AND CLASSES
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 1967ca4..0d9cddb 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -51,7 +51,7 @@
                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                     <manifestEntries>
-                                        <Main-Class>org.apache.hadoop.gateway.GatewayServer</Main-Class>
+                                        <Main-Class>org.apache.hadoop.gateway.launcher.Launcher</Main-Class>
                                     </manifestEntries>
                                 </transformer>
                             </transformers>

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/Example.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/Example.groovy b/gateway-release/samples/Example.groovy
deleted file mode 100644
index 952a5f4..0000000
--- a/gateway-release/samples/Example.groovy
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-
-gateway = "https://localhost:8443/gateway/sample"
-username = "mapred"
-password = "mapred-password"
-dataFile = "README"
-
-hadoop = Hadoop.login( gateway, username, password )
-Hdfs.rm( hadoop ).file( "/tmp/example" ).recursive().now()
-Hdfs.put( hadoop ).file( dataFile ).to( "/tmp/example/README" ).now().string
-text = Hdfs.ls( hadoop ).dir( "/tmp/example" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-hadoop.shutdown()
-
-
-

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/ExamplePutFile.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/ExamplePutFile.groovy b/gateway-release/samples/ExamplePutFile.groovy
new file mode 100644
index 0000000..952a5f4
--- /dev/null
+++ b/gateway-release/samples/ExamplePutFile.groovy
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+
+gateway = "https://localhost:8443/gateway/sample"
+username = "mapred"
+password = "mapred-password"
+dataFile = "README"
+
+hadoop = Hadoop.login( gateway, username, password )
+Hdfs.rm( hadoop ).file( "/tmp/example" ).recursive().now()
+Hdfs.put( hadoop ).file( dataFile ).to( "/tmp/example/README" ).now().string
+text = Hdfs.ls( hadoop ).dir( "/tmp/example" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+hadoop.shutdown()
+
+
+
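
As with the other samples, the renamed script can be run with the embedded Groovy interpreter from the GATEWAY_HOME directory; a usage sketch, assuming the 0.2.0-SNAPSHOT shell jar from this release:

    java -jar bin/shell-0.2.0-SNAPSHOT.jar samples/ExamplePutFile.groovy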

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/ExampleSubmitJob.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/ExampleSubmitJob.groovy b/gateway-release/samples/ExampleSubmitJob.groovy
new file mode 100644
index 0000000..d2522ea
--- /dev/null
+++ b/gateway-release/samples/ExampleSubmitJob.groovy
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+
+gateway = "https://localhost:8443/gateway/sample"
+username = "mapred"
+password = "mapred-password"
+dataFile = "LICENSE"
+jarFile = "samples/hadoop-examples.jar"
+
+hadoop = Hadoop.login( gateway, username, password )
+
+println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+println "Create /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+
+putData = Hdfs.put(hadoop).file( dataFile ).to( "/tmp/test/input/FILE" ).later() {
+  println "Put /tmp/test/input/FILE " + it.statusCode }
+putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/hadoop-examples.jar" ).later() {
+  println "Put /tmp/test/hadoop-examples.jar " + it.statusCode }
+hadoop.waitFor( putData, putJar )
+
+jobId = Job.submitJava(hadoop) \
+  .jar( "/tmp/test/hadoop-examples.jar" ) \
+  .app( "wordcount" ) \
+  .input( "/tmp/test/input" ) \
+  .output( "/tmp/test/output" ) \
+  .now().jobId
+println "Submitted job " + jobId
+
+println "Polling for completion..."
+done = false
+count = 0
+while( !done && count++ < 60 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(hadoop).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+}
+println "Done " + done
+
+println "Shutdown " + hadoop.shutdown( 10, SECONDS )
+
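
Once the job reports complete, the output directory can be checked with the same HDFS DSL calls used in ExamplePutFile.groovy. A sketch to run in the same session before shutdown (the _SUCCESS and part-* names are standard MapReduce output conventions, not something this commit asserts):

    import groovy.json.JsonSlurper
    // List /tmp/test/output; a successful WordCount run leaves _SUCCESS and part-* files.
    text = Hdfs.ls( hadoop ).dir( "/tmp/test/output" ).now().string
    json = (new JsonSlurper()).parseText( text )
    println json.FileStatuses.FileStatus.pathSuffix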

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/ExampleSubmitWorkflow.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/ExampleSubmitWorkflow.groovy b/gateway-release/samples/ExampleSubmitWorkflow.groovy
new file mode 100644
index 0000000..4dfc367
--- /dev/null
+++ b/gateway-release/samples/ExampleSubmitWorkflow.groovy
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.workflow.Workflow
+
+import static java.util.concurrent.TimeUnit.SECONDS
+
+gateway = "https://localhost:8443/gateway/sample"
+jobTracker = "sandbox:50300";
+nameNode = "sandbox:8020";
+username = "mapred"
+password = "mapred-password"
+inputFile = "LICENSE"
+jarFile = "samples/hadoop-examples.jar"
+
+definition = """\
+<workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
+    <start to="root-node"/>
+    <action name="root-node">
+        <java>
+            <job-tracker>$jobTracker</job-tracker>
+            <name-node>hdfs://$nameNode</name-node>
+            <main-class>org.apache.hadoop.examples.WordCount</main-class>
+            <arg>/tmp/test/input</arg>
+            <arg>/tmp/test/output</arg>
+        </java>
+        <ok to="end"/>
+        <error to="fail"/>
+    </action>
+    <kill name="fail">
+        <message>Java failed, error message[\${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>
+"""
+
+configuration = """\
+<configuration>
+    <property>
+        <name>user.name</name>
+        <value>$username</value>
+    </property>
+    <property>
+        <name>oozie.wf.application.path</name>
+        <value>hdfs://$nameNode/tmp/test</value>
+    </property>
+</configuration>
+"""
+
+hadoop = Hadoop.login( gateway, username, password )
+
+println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+println "Mkdir /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+putWorkflow = Hdfs.put(hadoop).text( definition ).to( "/tmp/test/workflow.xml" ).later() {
+  println "Put /tmp/test/workflow.xml " + it.statusCode }
+putData = Hdfs.put(hadoop).file( inputFile ).to( "/tmp/test/input/FILE" ).later() {
+  println "Put /tmp/test/input/FILE " + it.statusCode }
+putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/lib/hadoop-examples.jar" ).later() {
+  println "Put /tmp/test/lib/hadoop-examples.jar " + it.statusCode }
+hadoop.waitFor( putWorkflow, putData, putJar )
+
+jobId = Workflow.submit(hadoop).text( configuration ).now().jobId
+println "Submitted job " + jobId
+
+println "Polling for completion..."
+status = "UNKNOWN";
+count = 0;
+while( status != "SUCCEEDED" && count++ < 60 ) {
+  sleep( 1000 )
+  json = Workflow.status(hadoop).jobId( jobId ).now().string
+  status = JsonPath.read( json, "\$.status" )
+}
+println "Job status " + status;
+
+println "Shutdown " + hadoop.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/SmokeTestJob.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/SmokeTestJob.groovy b/gateway-release/samples/SmokeTestJob.groovy
deleted file mode 100644
index 871a274..0000000
--- a/gateway-release/samples/SmokeTestJob.groovy
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.jayway.jsonpath.JsonPath
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-
-gateway = "https://localhost:8443/gateway/sample"
-username = "mapred"
-password = "mapred-password"
-dataFile = "LICENSE"
-jarFile = "samples/hadoop-examples.jar"
-
-hadoop = Hadoop.login( gateway, username, password )
-
-println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
-println "Create /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
-
-putData = Hdfs.put(hadoop).file( dataFile ).to( "/tmp/test/input/FILE" ).later() {
-  println "Put /tmp/test/input/FILE " + it.statusCode }
-putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/hadoop-examples.jar" ).later() {
-  println "Put /tmp/test/hadoop-examples.jar " + it.statusCode }
-hadoop.waitFor( putData, putJar )
-
-jobId = Job.submitJava(hadoop) \
-  .jar( "/tmp/test/hadoop-examples.jar" ) \
-  .app( "wordcount" ) \
-  .input( "/tmp/test/input" ) \
-  .output( "/tmp/test/output" ) \
-  .now().jobId
-println "Submit job " + jobId
-
-done = false
-count = 0
-while( !done && count++ < 60 ) {
-  sleep( 1000 )
-  json = Job.queryStatus(hadoop).jobId(jobId).now().string
-  done = JsonPath.read( json, "\$.status.jobComplete" )
-}
-println "Done " + done
-
-println "Shutdown " + hadoop.shutdown( 10, SECONDS )
-

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-release/samples/SmokeTestWorkflow.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/samples/SmokeTestWorkflow.groovy b/gateway-release/samples/SmokeTestWorkflow.groovy
deleted file mode 100644
index c515797..0000000
--- a/gateway-release/samples/SmokeTestWorkflow.groovy
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.jayway.jsonpath.JsonPath
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.workflow.Workflow
-
-import static java.util.concurrent.TimeUnit.SECONDS
-
-gateway = "https://localhost:8443/gateway/sample"
-jobTracker = "sandbox:50300";
-nameNode = "sandbox:8020";
-username = "mapred"
-password = "mapred-password"
-inputFile = "LICENSE"
-jarFile = "samples/hadoop-examples.jar"
-
-definition = """\
-<workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
-    <start to="root-node"/>
-    <action name="root-node">
-        <java>
-            <job-tracker>$jobTracker</job-tracker>
-            <name-node>hdfs://$nameNode</name-node>
-            <main-class>org.apache.hadoop.examples.WordCount</main-class>
-            <arg>/tmp/test/input</arg>
-            <arg>/tmp/test/output</arg>
-        </java>
-        <ok to="end"/>
-        <error to="fail"/>
-    </action>
-    <kill name="fail">
-        <message>Java failed, error message[\${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-    <end name="end"/>
-</workflow-app>
-"""
-
-configuration = """\
-<configuration>
-    <property>
-        <name>user.name</name>
-        <value>$username</value>
-    </property>
-    <property>
-        <name>oozie.wf.application.path</name>
-        <value>hdfs://$nameNode/tmp/test</value>
-    </property>
-</configuration>
-"""
-
-hadoop = Hadoop.login( gateway, username, password )
-
-println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
-println "Mkdir /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
-putWorkflow = Hdfs.put(hadoop).text( definition ).to( "/tmp/test/workflow.xml" ).later() {
-  println "Put /tmp/test/workflow.xml " + it.statusCode }
-putData = Hdfs.put(hadoop).file( inputFile ).to( "/tmp/test/input/FILE" ).later() {
-  println "Put /tmp/test/input/FILE " + it.statusCode }
-putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/lib/hadoop-examples.jar" ).later() {
-  println "Put /tmp/test/lib/hadoop-examples.jar " + it.statusCode }
-hadoop.waitFor( putWorkflow, putData, putJar )
-
-jobId = Workflow.submit(hadoop).text( configuration ).now().jobId
-println "Submit job " + jobId
-
-status = "UNKNOWN";
-count = 0;
-while( status != "SUCCEEDED" && count++ < 60 ) {
-  sleep( 1000 )
-  json = Workflow.status(hadoop).jobId( jobId ).now().string
-  status = JsonPath.read( json, "\$.status" )
-}
-println "Job status " + status;
-
-println "Shutdown " + hadoop.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 0748a2b..59d7016 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -73,6 +73,10 @@
         </dependency>
         <dependency>
             <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-util-launcher</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
             <artifactId>gateway-util-urltemplate</artifactId>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-server/src/main/resources/META-INF/launcher.cfg
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/META-INF/launcher.cfg b/gateway-server/src/main/resources/META-INF/launcher.cfg
new file mode 100644
index 0000000..419051c
--- /dev/null
+++ b/gateway-server/src/main/resources/META-INF/launcher.cfg
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+main.class = org.apache.hadoop.gateway.GatewayServer
+class.path = ../lib; ../lib/*.jar; ../ext; ../ext/*.jar
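
The launcher.cfg entries are plain java.util.Properties key=value pairs. A minimal sketch of how such a file could be read (illustrative only; the actual gateway-util-launcher implementation is not part of this diff):

    import java.util.Properties

    // Load the key=value pairs; '#' comment lines are ignored by Properties.
    def props = new Properties()
    new File( "META-INF/launcher.cfg" ).withInputStream { props.load( it ) }

    def mainClass = props.getProperty( "main.class" )  // org.apache.hadoop.gateway.GatewayServer
    // Split the semicolon-delimited classpath entries.
    def classPath = props.getProperty( "class.path" ).split( ";" )*.trim()
    println "main=${mainClass} classpath=${classPath}"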

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-site/src/site/markdown/client.md.vm
----------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/client.md.vm b/gateway-site/src/site/markdown/client.md.vm
index fc325d1..87f5260 100644
--- a/gateway-site/src/site/markdown/client.md.vm
+++ b/gateway-site/src/site/markdown/client.md.vm
@@ -70,7 +70,7 @@ The shell can be run interactively.
 
 The shell can also be used to execute a script by passing a single filename argument.
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
 
 When running interactively it may be helpful to reduce some of the output generated by the shell console.
 Use the following command in the interactive shell to reduce that output.
@@ -88,7 +88,7 @@ Examples
 Once the shell can be launched the DSL can be used to interact with the gateway and Hadoop.
 Below is a very simple example of an interactive shell session to upload a file to HDFS.
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar
+    java -jar bin/shell-${gateway-version}.jar
     knox:000> hadoop = Hadoop.login( "https://localhost:8443/gateway/sample", "hdfs", "hdfs-password" )
     knox:000> Hdfs.put( hadoop ).file( "README" ).to( "/tmp/example/README" ).now()
 
@@ -417,8 +417,8 @@ In order to add new service and commands new classes must be written in either G
 Fortunately there is a very simple way to add classes and JARs to the shell classpath.
 The first time the shell is executed it will create a configuration file in the same directory as the JAR with the same base name and a `.cfg` extension.
 
-    bin/shell-0.2.0-SNAPSHOT.jar
-    bin/shell-0.2.0-SNAPSHOT.cfg
+    bin/shell-${gateway-version}.jar
+    bin/shell-${gateway-version}.cfg
 
 That file contains both the main class for the shell and a definition of the classpath.
 Currently that file will by default contain the following.
@@ -434,7 +434,7 @@ These happen to be Groovy source files but could with very minor changes be Java
 The easiest way to add these to the shell is to compile them directly into the `ext` directory.
 *Note: This command depends upon having the Groovy compiler installed and available on the execution path.*
 
-    groovyc -d ext -cp bin/shell-0.2.0-SNAPSHOT.jar samples/SampleService.groovy samples/SampleSimpleCommand.groovy samples/SampleComplexCommand.groovy
+    groovyc -d ext -cp bin/shell-${gateway-version}.jar samples/SampleService.groovy samples/SampleSimpleCommand.groovy samples/SampleComplexCommand.groovy
 
 These source files are available in the samples directory of the distribution but are included here for convenience.
 
@@ -551,17 +551,17 @@ Groovy
 The shell included in the distribution is basically an unmodified packaging of the Groovy shell.
 Therefore these commands are functionally equivalent if you have Groovy [installed][15].
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
-    groovy -cp bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+    groovy -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
 
 The interactive shell isn't exactly equivalent.
-However the only difference is that the shell-0.2.0-SNAPSHOT.jar automatically executes some additional imports that are useful for the KnoxShell DSL.
+However the only difference is that the shell-${gateway-version}.jar automatically executes some additional imports that are useful for the KnoxShell DSL.
 So these two sets of commands should be functionally equivalent.
 ***However there is currently a class loading issue that prevents the groovysh command from working properly.***
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar
+    java -jar bin/shell-${gateway-version}.jar
 
-    groovysh -cp bin/shell-0.2.0-SNAPSHOT.jar # BROKEN, CLASS LOADING ISSUE
+    groovysh -cp bin/shell-${gateway-version}.jar
     import org.apache.hadoop.gateway.shell.Hadoop
     import org.apache.hadoop.gateway.shell.hdfs.Hdfs
     import org.apache.hadoop.gateway.shell.job.Job
@@ -570,7 +570,7 @@ So these two sets of commands should be functionally equivalent.
 
 Alternatively, you can use the Groovy Console which does not appear to have the same class loading issue.
 
-    groovyConsole -cp bin/shell-0.2.0-SNAPSHOT.jar
+    groovyConsole -cp bin/shell-${gateway-version}.jar
 
     import org.apache.hadoop.gateway.shell.Hadoop
     import org.apache.hadoop.gateway.shell.hdfs.Hdfs
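
The shell's generated .cfg described in this file appears to use the same key=value layout as the launcher.cfg added elsewhere in this commit. A hypothetical example (the shell's actual main class and default classpath are not shown in this diff):

    # Hypothetical contents of bin/shell-${gateway-version}.cfg; real defaults may differ.
    main.class = org.apache.hadoop.gateway.shell.Shell
    class.path = ../ext; ../ext/*.jar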

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-site/src/site/markdown/examples.md.vm
----------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/examples.md.vm b/gateway-site/src/site/markdown/examples.md.vm
index 3541734..672e538 100644
--- a/gateway-site/src/site/markdown/examples.md.vm
+++ b/gateway-site/src/site/markdown/examples.md.vm
@@ -16,7 +16,228 @@ limitations under the License.
 -->
 
 ------------------------------------------------------------------------------
-Example #3: WebHDFS & Templeton/WebHCat
+Apache Knox Gateway - Usage Examples
+------------------------------------------------------------------------------
+This guide provides detailed examples of basic interactions with Hadoop via
+the Apache Knox Gateway.
+
+The first two examples submit a Java MapReduce job and workflow using the
+KnoxShell DSL.
+
+* Example #1: WebHDFS & Templeton/WebHCat via KnoxShell DSL
+* Example #2: WebHDFS & Oozie via KnoxShell DSL
+
+The next two examples submit the same job and workflow but do so using only
+the [cURL](http://curl.haxx.se/) command line HTTP client.
+
+* Example #3: WebHDFS & Templeton/WebHCat via cURL
+* Example #4: WebHDFS & Oozie via cURL
+
+------------------------------------------------------------------------------
+Assumptions
+------------------------------------------------------------------------------
+This document assumes a few things about your environment in order to
+simplify the examples.
+
+1. The JVM is executable as simply `java`.
+2. The Apache Knox Gateway is installed and functional.
+3. The example commands are executed with the GATEWAY_HOME directory as the
+   current working directory.  The GATEWAY_HOME directory is the directory
+   within the Apache Knox Gateway installation that contains the README file
+   and the bin, conf and deployments directories.
+4. A few examples use commands from a standard Groovy installation.  These
+   examples are optional, but to try them you will need Groovy
+   [installed][gii].
+
+[gii]: http://groovy.codehaus.org/Installing+Groovy
+
+------------------------------------------------------------------------------
+Customization
+------------------------------------------------------------------------------
+These examples may need to be tailored to your execution environment.  In
+particular, hostnames and ports may need to be changed to match your
+environment.  There are two example files in the distribution that may need
+to be customized; take a moment to review them.  All of the values that may
+need to be customized can be found together at the top of each file.
+
+* samples/ExampleSubmitJob.groovy
+* samples/ExampleSubmitWorkflow.groovy
+
+If you are using the Sandbox VM for your Hadoop cluster you may want to
+review [these configuration tips][sb].
+
+[sb]: sandbox.html
+
+------------------------------------------------------------------------------
+Example #1: WebHDFS & Templeton/WebHCat via KnoxShell DSL
+------------------------------------------------------------------------------
+This example will submit the familiar WordCount Java MapReduce job to the
+Hadoop cluster via the gateway using the KnoxShell DSL.  There are several
+ways to do this depending upon your preference.
+
+You can use the "embedded" Groovy interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+
+You can load the KnoxShell DSL script into the standard Groovy Console.
+
+    groovyConsole -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+
+You can manually type the KnoxShell DSL script into the "embedded" Groovy
+interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar
+
+Each line from the file below will need to be typed or copied into the
+interactive shell.
+
+***samples/ExampleSubmitJob.groovy***
+
+    import com.jayway.jsonpath.JsonPath
+    import org.apache.hadoop.gateway.shell.Hadoop
+    import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+    import org.apache.hadoop.gateway.shell.job.Job
+
+    import static java.util.concurrent.TimeUnit.SECONDS
+
+    gateway = "https://localhost:8443/gateway/sample"
+    username = "mapred"
+    password = "mapred-password"
+    dataFile = "LICENSE"
+    jarFile = "samples/hadoop-examples.jar"
+
+    hadoop = Hadoop.login( gateway, username, password )
+
+    println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+    println "Create /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+
+    putData = Hdfs.put(hadoop).file( dataFile ).to( "/tmp/test/input/FILE" ).later() {
+      println "Put /tmp/test/input/FILE " + it.statusCode }
+    putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/hadoop-examples.jar" ).later() {
+      println "Put /tmp/test/hadoop-examples.jar " + it.statusCode }
+    hadoop.waitFor( putData, putJar )
+
+    jobId = Job.submitJava(hadoop) \
+      .jar( "/tmp/test/hadoop-examples.jar" ) \
+      .app( "wordcount" ) \
+      .input( "/tmp/test/input" ) \
+      .output( "/tmp/test/output" ) \
+      .now().jobId
+    println "Submitted job " + jobId
+
+    done = false
+    count = 0
+    while( !done && count++ < 60 ) {
+      sleep( 1000 )
+      json = Job.queryStatus(hadoop).jobId(jobId).now().string
+      done = JsonPath.read( json, "\$.status.jobComplete" )
+    }
+    println "Done " + done
+
+    println "Shutdown " + hadoop.shutdown( 10, SECONDS )
+
+------------------------------------------------------------------------------
+Example #2: WebHDFS & Oozie via KnoxShell DSL
+------------------------------------------------------------------------------
+This example will also submit the familiar WordCount Java MapReduce job to the
+Hadoop cluster via the gateway using the KnoxShell DSL.  However in this case
+the job will be submitted via an Oozie workflow.  There are several ways to do
+this depending upon your preference.
+
+You can use the "embedded" Groovy interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitWorkflow.groovy
+
+You can load the KnoxShell DSL script into the standard Groovy Console.
+
+    groovyConsole -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitWorkflow.groovy
+
+You can manually type the KnoxShell DSL script into the "embedded" Groovy
+interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar
+
+Each line from the file below will need to be typed or copied into the
+interactive shell.
+
+***samples/ExampleSubmitWorkflow.groovy***
+
+    import com.jayway.jsonpath.JsonPath
+    import org.apache.hadoop.gateway.shell.Hadoop
+    import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+    import org.apache.hadoop.gateway.shell.workflow.Workflow
+
+    import static java.util.concurrent.TimeUnit.SECONDS
+
+    gateway = "https://localhost:8443/gateway/sample"
+    jobTracker = "sandbox:50300";
+    nameNode = "sandbox:8020";
+    username = "mapred"
+    password = "mapred-password"
+    inputFile = "LICENSE"
+    jarFile = "samples/hadoop-examples.jar"
+
+    definition = """\
+    <workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
+        <start to="root-node"/>
+        <action name="root-node">
+            <java>
+                <job-tracker>$jobTracker</job-tracker>
+                <name-node>hdfs://$nameNode</name-node>
+                <main-class>org.apache.hadoop.examples.WordCount</main-class>
+                <arg>/tmp/test/input</arg>
+                <arg>/tmp/test/output</arg>
+            </java>
+            <ok to="end"/>
+            <error to="fail"/>
+        </action>
+        <kill name="fail">
+            <message>Java failed</message>
+        </kill>
+        <end name="end"/>
+    </workflow-app>
+    """
+
+    configuration = """\
+    <configuration>
+        <property>
+            <name>user.name</name>
+            <value>$username</value>
+        </property>
+        <property>
+            <name>oozie.wf.application.path</name>
+            <value>hdfs://$nameNode/tmp/test</value>
+        </property>
+    </configuration>
+    """
+
+    hadoop = Hadoop.login( gateway, username, password )
+
+    println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+    println "Mkdir /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+    putWorkflow = Hdfs.put(hadoop).text( definition ).to( "/tmp/test/workflow.xml" ).later() {
+      println "Put /tmp/test/workflow.xml " + it.statusCode }
+    putData = Hdfs.put(hadoop).file( inputFile ).to( "/tmp/test/input/FILE" ).later() {
+      println "Put /tmp/test/input/FILE " + it.statusCode }
+    putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/lib/hadoop-examples.jar" ).later() {
+      println "Put /tmp/test/lib/hadoop-examples.jar " + it.statusCode }
+    hadoop.waitFor( putWorkflow, putData, putJar )
+
+    jobId = Workflow.submit(hadoop).text( configuration ).now().jobId
+    println "Submitted job " + jobId
+
+    status = "UNKNOWN";
+    count = 0;
+    while( status != "SUCCEEDED" && count++ < 60 ) {
+      sleep( 1000 )
+      json = Workflow.status(hadoop).jobId( jobId ).now().string
+      status = JsonPath.read( json, "\$.status" )
+    }
+    println "Job status " + status;
+
+    println "Shutdown " + hadoop.shutdown( 10, SECONDS )
+
+------------------------------------------------------------------------------
+Example #3: WebHDFS & Templeton/WebHCat via cURL
 ------------------------------------------------------------------------------
 The example below illustrates the sequence of curl commands that could be used
 to run a "word count" map reduce job.  It utilizes the hadoop-examples.jar
@@ -75,7 +296,7 @@ DataNode that follows it.
       'https://localhost:8443/gateway/sample/namenode/api/v1/tmp/test?op=DELETE&recursive=true'
 
 ------------------------------------------------------------------------------
-Example #4: WebHDFS & Oozie
+Example #4: WebHDFS & Oozie via cURL
 ------------------------------------------------------------------------------
 The example below illustrates the sequence of curl commands that could be used
 to run a "word count" map reduce job via an Oozie workflow.  It utilizes the
@@ -139,7 +360,6 @@ required.  These replacement values are identified with { } markup.
     curl -i -k -u mapred:mapred-password -X DELETE \
       'https://localhost:8443/gateway/sample/namenode/api/v1/tmp/test?op=DELETE&recursive=true'
 
-
 ------------------------------------------------------------------------------
 Disclaimer
 ------------------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-site/src/site/markdown/getting-started.md.vm
----------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/getting-started.md.vm b/gateway-site/src/site/markdown/getting-started.md.vm
index 05df9c0..411005a 100644
--- a/gateway-site/src/site/markdown/getting-started.md.vm
+++ b/gateway-site/src/site/markdown/getting-started.md.vm
@@ -16,7 +16,7 @@ limitations under the License.
 -->
 
 ------------------------------------------------------------------------------
-Getting Started
+Apache Knox Gateway - Getting Started
 ------------------------------------------------------------------------------
 This guide describes the steps required to install, deploy and validate the
 Apache Knox Gateway.
@@ -46,6 +46,12 @@ gateway wherever it happens to be running.
 Ensure that the Hadoop cluster has WebHDFS, WebHCat
 (i.e. Templeton) and Oozie configured, deployed and running.
 
+This release of the Apache Knox Gateway has been tested against the
+[Hortonworks Sandbox 1.2][hsb] with [these changes][sb].
+
+[hsb]: http://hortonworks.com/products/hortonworks-sandbox/
+[sb]: sandbox.html
+
 ------------------------------------------------------------------------------
 Installation
 ------------------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-site/src/site/markdown/sandbox.md
----------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/sandbox.md b/gateway-site/src/site/markdown/sandbox.md
new file mode 100644
index 0000000..c1aac5d
--- /dev/null
+++ b/gateway-site/src/site/markdown/sandbox.md
@@ -0,0 +1,62 @@
+<!---
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+------------------------------------------------------------------------------
+Sandbox Configuration
+------------------------------------------------------------------------------
+This version of the Apache Knox Gateway is tested against the
+[Hortonworks Sandbox 1.2][sb].
+
+Out of the box the Sandbox's configuration refers to its services via
+localhost, so addresses handed back to clients are not resolvable from
+outside the VM.  To correct this you can use the commands below to log in to
+the Sandbox VM and modify the configuration.  This assumes that the name
+sandbox is set up to resolve to the Sandbox VM.  It may be necessary to use
+the IP address of the Sandbox VM instead.  ***This is frequently but not
+always 192.168.56.101.***
+
+    ssh root@sandbox
+    sed -i s/localhost/sandbox/ /usr/lib/hadoop/conf/hdfs-site.xml
+    shutdown -r now
+
+In addition, to make it easy to follow along with the gateway samples, you
+can configure your local system to resolve the address of the Sandbox via
+the names `vm` and `sandbox`.
+
+On Linux or Macintosh systems add a line like this to the end of the
+`/etc/hosts` file on your local machine, ***not the Sandbox VM***.
+*Note: That is a _tab_ character between `192.168.56.101` and `vm`.*
+
+    192.168.56.101	vm sandbox
+
+On Windows systems a similar mechanism can be used.  On recent versions of
+Windows the file that should be modified is
+`%systemroot%\system32\drivers\etc\hosts`.
+
+[sb]: http://hortonworks.com/products/hortonworks-sandbox/
+
+------------------------------------------------------------------------------
+Disclaimer
+------------------------------------------------------------------------------
+The Apache Knox Gateway is an effort undergoing incubation at the
+Apache Software Foundation (ASF), sponsored by the Apache Incubator PMC.
+
+Incubation is required of all newly accepted projects until a further review
+indicates that the infrastructure, communications, and decision making process
+have stabilized in a manner consistent with other successful ASF projects.
+
+While incubation status is not necessarily a reflection of the completeness
+or stability of the code, it does indicate that the project has yet to be
+fully endorsed by the ASF.
\ No newline at end of file
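
After editing the hosts file, a quick check that both names resolve to the VM before running the samples (illustrative commands for Linux or Macintosh):

    # Each name should answer from 192.168.56.101 (or your VM's actual address).
    ping -c 1 sandbox
    ping -c 1 vm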

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/e2a62cc8/gateway-site/src/site/site.xml
----------------------------------------------------------------------
diff --git a/gateway-site/src/site/site.xml b/gateway-site/src/site/site.xml
index eda5898..5d23bd6 100644
--- a/gateway-site/src/site/site.xml
+++ b/gateway-site/src/site/site.xml
@@ -99,6 +99,7 @@
             <item name="Getting Started" href="getting-started.html"/>
             <item name="Usage Examples" href="examples.html"/>
             <item name="Client (KnoxShell DSL)" href="client.html"/>
+            <item name="Sandbox Configuration" href="sandbox.html"/>
             <item name="Wiki" href="https://cwiki.apache.org/confluence/display/KNOX/Knox"/>
         </menu>
 

