avro-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r769139 - in /hadoop/avro/trunk: lib/py/ src/doc/content/xdocs/ src/java/org/apache/avro/ src/java/org/apache/avro/ipc/ src/py/avro/ src/test/java/org/apache/avro/ src/test/py/ src/test/schemata/
Date Mon, 27 Apr 2009 20:13:32 GMT
Author: cutting
Date: Mon Apr 27 20:13:31 2009
New Revision: 769139

URL: http://svn.apache.org/viewvc?rev=769139&view=rev
Log:
AVRO-1.  Record fields are now defined with JSON arrays, rather than JSON objects, since fields
are ordered.

Removed:
    hadoop/avro/trunk/lib/py/odict.py
    hadoop/avro/trunk/src/py/avro/jsonparser.py
    hadoop/avro/trunk/src/test/schemata/big.js
    hadoop/avro/trunk/src/test/schemata/fs-name.js
    hadoop/avro/trunk/src/test/schemata/little.js
    hadoop/avro/trunk/src/test/schemata/tree.js
Modified:
    hadoop/avro/trunk/src/doc/content/xdocs/spec.xml
    hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
    hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
    hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java
    hadoop/avro/trunk/src/py/avro/ipc.py
    hadoop/avro/trunk/src/py/avro/protocol.py
    hadoop/avro/trunk/src/py/avro/schema.py
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
    hadoop/avro/trunk/src/test/py/testio.py
    hadoop/avro/trunk/src/test/py/testioreflect.py
    hadoop/avro/trunk/src/test/schemata/fs-data.js
    hadoop/avro/trunk/src/test/schemata/interop.js
    hadoop/avro/trunk/src/test/schemata/test.js

Modified: hadoop/avro/trunk/src/doc/content/xdocs/spec.xml
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/doc/content/xdocs/spec.xml?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/doc/content/xdocs/spec.xml (original)
+++ hadoop/avro/trunk/src/doc/content/xdocs/spec.xml Mon Apr 27 20:13:31 2009
@@ -94,25 +94,28 @@
 	  <p>Records use the type name "record" and support two attributes:</p>
 	  <ul>
 	    <li><code>name</code>: a JSON string providing the name
-	    of the record, </li>
-	    <li><code>fields</code>: a JSON object, listing field
-	    names and schemas.</li>
+	    of the record (optional).</li>
+	    <li><code>fields</code>: a JSON array, listing fields (required).
+	    Each field is a JSON object with the following attributes:
+	      <ul>
+		<li><code>name</code>: a JSON string providing the name
+		  of the field (required), and </li>
+		<li><code>type</code>: a JSON object defining a schema, or
+		  a JSON string naming a record definition
+		  (required).</li>
+	      </ul>
+	    </li>
 	  </ul>
 
-	  <p>The fields of a record are <em>ordered</em>.  Two
-	  otherwise identical records whose fields are ordered
-	  differently represent equivalent data but are not
-	  serialized identically.</p>
-
 	  <p>For example, a linked-list of 64-bit values may be defined with:</p>
 	  <source>
 {
   "type": "record", 
   "name": "LongList",
-  "fields" : {
-    "value": "long",               // each element has a long
-    "next": ["LongList", "null"]   // optional next element
-  }
+  "fields" : [
+    {"name": "value", "type": "long"},             // each element has a long
+    {"name": "next", "type": ["LongList", "null"]} // optional next element
+  ]
 }
 	  </source>
 	</section>
@@ -245,10 +248,10 @@
 {
   "type": "record", 
   "name": "test",
-  "fields" : {
-    "a": "long",
-    "b": "string"
-  }
+  "fields" : [
+    {"name": "a", "type": "long"},
+    {"name": "b", "type": "string"}
+  ]
 }
 	  </source>
 	  <p>An instance of this record whose <code>a</code> field has
@@ -453,8 +456,10 @@
   "protocol": "HelloWorld",
 
   "types": [
-    {"name": "Greeting", "type": "record", "fields": {"name": "string"}},
-    {"name": "Curse", "type": "error", "fields": {"message": "string"}}
+    {"name": "Greeting", "type": "record", "fields": [
+      {"name": "message", "type": "string"}]},
+    {"name": "Curse", "type": "error", "fields": [
+      {"name": "message", "type": "string"}]}
   ],
 
   "messages": {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java Mon Apr 27 20:13:31 2009
@@ -78,17 +78,18 @@
     
     public String toString() {
       StringBuilder buffer = new StringBuilder();
-      buffer.append("{\"request\": {");
+      buffer.append("{\"request\": [");
       int count = 0;
       for (Map.Entry<String, Schema> entry : request.getFieldSchemas()) {
-        buffer.append("\"");
+        buffer.append("{\"name\": \"");
         buffer.append(entry.getKey());
-        buffer.append("\": ");
+        buffer.append("\", \"type\": ");
         buffer.append(entry.getValue().toString(types));
+        buffer.append("}");
         if (++count < request.getFields().size())
           buffer.append(", ");
       }
-      buffer.append("}, \"response\": "+response.toString(types));
+      buffer.append("], \"response\": "+response.toString(types));
 
       List<Schema> errTypes = errors.getTypes();  // elide system error
       if (errTypes.size() > 1) {
@@ -197,31 +198,26 @@
 
   /** Read a protocol from a Json file. */
   public static Protocol parse(File file) throws IOException {
-    return parse(new FileInputStream(file));
+    return parse(Schema.FACTORY.createJsonParser(file));
   }
 
   /** Read a protocol from a Json string. */
   public static Protocol parse(String string) {
     try {
-      return parse(new ByteArrayInputStream(string.getBytes("UTF-8")));
+      return parse(Schema.FACTORY.createJsonParser
+                   (new ByteArrayInputStream(string.getBytes("UTF-8"))));
     } catch (IOException e) {
       throw new AvroRuntimeException(e);
     }
   }
 
-  /** Read a protocol from a Json stream. */
-  public static Protocol parse(InputStream in) throws IOException {
+  private static Protocol parse(JsonParser parser) {
     try {
-      JsonParser parser = Schema.FACTORY.createJsonParser(in);
-      try {
-        Protocol protocol = new Protocol();
-        protocol.parse(Schema.MAPPER.read(parser));
-        return protocol;
-      } catch (JsonParseException e) {
-        throw new SchemaParseException(e);
-      }
-    } finally {
-      in.close();
+      Protocol protocol = new Protocol();
+      protocol.parse(Schema.MAPPER.read(parser));
+      return protocol;
+    } catch (IOException e) {
+      throw new SchemaParseException(e);
     }
   }
 
@@ -268,12 +264,17 @@
 
   private Message parseMessage(String messageName, JsonNode json) {
     JsonNode requestNode = json.getFieldValue("request");
-    if (requestNode == null)
+    if (requestNode == null || !requestNode.isArray())
       throw new SchemaParseException("No request specified: "+json);
     Map<String,Schema> fields = new LinkedHashMap<String,Schema>();
-    for (Iterator<String> i = requestNode.getFieldNames(); i.hasNext();) {
-      String prop = i.next();
-      fields.put(prop, Schema.parse(requestNode.getFieldValue(prop), types));
+    for (JsonNode field : requestNode) {
+      JsonNode fieldNameNode = field.getFieldValue("name");
+      if (fieldNameNode == null)
+        throw new SchemaParseException("No param name: "+field);
+      JsonNode fieldTypeNode = field.getFieldValue("type");
+      if (fieldTypeNode == null)
+        throw new SchemaParseException("No param type: "+field);
+      fields.put(fieldNameNode.getTextValue(),Schema.parse(fieldTypeNode,types));
     }
     Schema request = Schema.create(fields);
     

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Schema.java?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Schema.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Schema.java Mon Apr 27 20:13:31 2009
@@ -235,17 +235,18 @@
       StringBuilder buffer = new StringBuilder();
       buffer.append("{\"type\": \""+(isError?"error":"record")+"\", "
                     +(name==null?"":"\"name\": \""+name+"\", ")
-                    +"\"fields\": {");
+                    +"\"fields\": [");
       int count = 0;
       for (Map.Entry<String, Schema> entry : fieldSchemas) {
-        buffer.append("\"");
+        buffer.append("{\"name\": \"");
         buffer.append(entry.getKey());
-        buffer.append("\": ");
+        buffer.append("\", \"type\": ");
         buffer.append(entry.getValue().toString(names));
+        buffer.append("}");
         if (++count < fields.size())
           buffer.append(", ");
       }
-      buffer.append("}}");
+      buffer.append("]}");
       return buffer.toString();
     }
   }
@@ -389,16 +390,11 @@
   private static final NullSchema    NULL_SCHEMA =    new NullSchema();
 
   public static Schema parse(File file) throws IOException {
-    InputStream in = new FileInputStream(file);
+    JsonParser parser = FACTORY.createJsonParser(file);
     try {
-      JsonParser parser = FACTORY.createJsonParser(in);
-      try {
-        return Schema.parse(MAPPER.read(parser), new Names());
-      } catch (JsonParseException e) {
-        throw new SchemaParseException(e);
-      }
-    } finally {
-      in.close();
+      return Schema.parse(MAPPER.read(parser), new Names());
+    } catch (JsonParseException e) {
+      throw new SchemaParseException(e);
     }
   }
 
@@ -468,10 +464,17 @@
         RecordSchema result =
           new RecordSchema(name, space, type.equals("error"));
         if (name != null) names.put(name, result);
-        JsonNode props = schema.getFieldValue("fields");
-        for (Iterator<String> i = props.getFieldNames(); i.hasNext();) {
-          String prop = i.next();
-          fields.put(prop, parse(props.getFieldValue(prop), names));
+        JsonNode fieldsNode = schema.getFieldValue("fields");
+        if (fieldsNode == null || !fieldsNode.isArray())
+          throw new SchemaParseException("Record has no fields: "+schema);
+        for (JsonNode field : fieldsNode) {
+          JsonNode fieldNameNode = field.getFieldValue("name");
+          if (fieldNameNode == null)
+            throw new SchemaParseException("No field name: "+field);
+          JsonNode fieldTypeNode = field.getFieldValue("type");
+          if (fieldTypeNode == null)
+            throw new SchemaParseException("No field type: "+field);
+          fields.put(fieldNameNode.getTextValue(), parse(fieldTypeNode, names));
         }
         result.setFields(fields);
         return result;

Modified: hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java Mon Apr 27 20:13:31 2009
@@ -85,6 +85,9 @@
         response = respond(m, request);
       } catch (AvroRemoteException e) {
         error = e;
+      } catch (Exception e) {
+        LOG.warn("application error", e);
+        error = new AvroRemoteException(new Utf8(e.toString()));
       }
 
       out.writeBoolean(error != null);
@@ -94,7 +97,7 @@
         writeError(m.getErrors(), error, out);
 
     } catch (AvroRuntimeException e) {            // system error
-      LOG.warn("unexpected error", e);
+      LOG.warn("system error", e);
       error = new AvroRemoteException(e);
       out = new ByteBufferValueWriter();
       out.writeBoolean(true);

Modified: hadoop/avro/trunk/src/py/avro/ipc.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/py/avro/ipc.py?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/py/avro/ipc.py (original)
+++ hadoop/avro/trunk/src/py/avro/ipc.py Mon Apr 27 20:13:31 2009
@@ -156,6 +156,8 @@
         response = self.invoke(m, req)
       except AvroRemoteException, e:
         error = e
+      except Exception, e:
+        error = AvroRemoteException(unicode(e.__str__()))
       vwriter.writeboolean(error is not None)
       if error is None:
         self.writeresponse(m.getresponse(), response, vwriter)

Modified: hadoop/avro/trunk/src/py/avro/protocol.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/py/avro/protocol.py?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/py/avro/protocol.py (original)
+++ hadoop/avro/trunk/src/py/avro/protocol.py Mon Apr 27 20:13:31 2009
@@ -15,7 +15,7 @@
 #limitations under the License.
 
 import cStringIO
-import jsonparser
+import simplejson
 import avro.schema as schema
 
 #The version implemented.
@@ -67,17 +67,18 @@
 
     def __str__(self):
       str = cStringIO.StringIO()
-      str.write("{\"request\": {")
+      str.write("{\"request\": [")
       count = 0
       for k,v in self.__request.getfields():
-        str.write("\"")
+        str.write("{\"name\": \"")
         str.write(k)
-        str.write("\": ")
+        str.write("\", \"type\": ")
         str.write(v.str(self.__proto.gettypes()))
+        str.write("}")
         count+=1
         if count < len(self.__request.getfields()):
           str.write(", ")
-      str.write("}, \"response\": "+
+      str.write("], \"response\": "+
                 self.__response.str(self.__proto.gettypes()))
       list = self.__errors.getelementtypes()
       if len(list) > 1:
@@ -143,8 +144,14 @@
     if res is None:
       raise SchemaParseException("No response specified: "+obj.__str__())
     fields = dict()
-    for k,v in req.items():
-      fields[k] = schema._parse(v, self.__types)
+    for field in req:
+      fieldname = field.get("name")
+      if fieldname is None:
+        raise SchemaParseException("No param name: "+field.__str__())
+      fieldtype = field.get("type")
+      if fieldtype is None:
+        raise SchemaParseException("No param type: "+field.__str__())
+      fields[fieldname] = schema._parse(fieldtype, self.__types)
     request = schema._RecordSchema(list(fields.iteritems()))
     response = schema._parse(res, self.__types)
 
@@ -169,5 +176,5 @@
 def parse(json_string):
   """Constructs the Protocol from the json text."""
   protocol = Protocol()
-  protocol._parse(jsonparser.parse(json_string))
+  protocol._parse(simplejson.loads(json_string))
   return protocol
\ No newline at end of file

Modified: hadoop/avro/trunk/src/py/avro/schema.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/py/avro/schema.py?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/py/avro/schema.py (original)
+++ hadoop/avro/trunk/src/py/avro/schema.py Mon Apr 27 20:13:31 2009
@@ -29,8 +29,7 @@
   A boolean."""
 
 import cStringIO
-import odict
-import avro.jsonparser as jsonparser
+import simplejson
 
 #The schema types
 STRING, BYTES, INT, LONG, FLOAT, DOUBLE, BOOLEAN, NULL, ARRAY, MAP, UNION, RECORD = range(12)
@@ -144,17 +143,18 @@
       str.write("\"name\": \""+self.__name+"\", ")
     #if self.__namespace is not None:
       #str.write("\"namespace\": \""+self.__namespace+"\", ")
-    str.write("\"fields\": {")
+    str.write("\"fields\": [")
     count=0
     for k,v in self.__fields:
-      str.write("\"")
+      str.write("{\"name\": \"")
       str.write(k)
-      str.write("\": ")
+      str.write("\", \"type\": ")
       str.write(v.str(names))
+      str.write("}")
       count+=1
       if count < len(self.__fields):
         str.write(",")
-    str.write("}}")
+    str.write("]}")
     return str.getvalue()
 
   def __eq__(self, other, seen={}):
@@ -338,8 +338,17 @@
       schema = _RecordSchema(fields, name, namespace, type == "error")
       if name is not None:
         names[name] = schema
-      for k,v in obj.get("fields").items():
-        fields.append((k, _parse(v, names)))
+      fieldsnode = obj.get("fields")
+      if fieldsnode is None:
+        raise SchemaParseException("Record has no fields: "+obj.__str__())
+      for field in fieldsnode:
+        fieldname = field.get("name")
+        if fieldname is None:
+          raise SchemaParseException("No field name: "+field.__str__())
+        fieldtype = field.get("type")
+        if fieldtype is None:
+          raise SchemaParseException("No field type: "+field.__str__())
+        fields.append((fieldname, _parse(fieldtype, names)))
       return schema
     elif type == "array":
       return _ArraySchema(_parse(obj.get("items"), names))
@@ -362,5 +371,5 @@
 
 def parse(json_string):
   """Constructs the Schema from the json text."""
-  dict = jsonparser.parse(json_string)
+  dict = simplejson.loads(json_string)
   return _parse(dict, _Names())

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java Mon Apr 27 20:13:31
2009
@@ -45,9 +45,9 @@
   private static final long SEED = System.currentTimeMillis();
 
   private static final String SCHEMA_JSON =
-    "{\"type\": \"record\", \"fields\":{"
-    +"\"stringField\":\"string\","
-    +"\"longField\": \"long\"}}";
+    "{\"type\": \"record\", \"fields\": ["
+    +"{\"name\":\"stringField\", \"type\":\"string\"},"
+    +"{\"name\":\"longField\", \"type\":\"long\"}]}";
   private static final Schema SCHEMA = Schema.parse(SCHEMA_JSON);
 
   public void testGenericWrite() throws IOException {

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java Mon Apr 27 20:13:31 2009
@@ -70,21 +70,24 @@
   }
 
   public void testRecord() throws Exception {
-    check("{\"type\":\"record\",\"fields\":{\"f\":\"string\"}}");
+    check("{\"type\":\"record\",\"fields\":["
+          +"{\"name\":\"f\", \"type\":\"string\"}]}");
   }
 
   public void testRecursive() throws Exception {
-    check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": {"
-          +"\"label\": \"string\","
-          +"\"children\": {\"type\": \"array\", \"items\": \"Node\" }}}",
+    check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+          +"{\"name\":\"label\", \"type\":\"string\"},"
+          +"{\"name\":\"children\", \"type\":"
+          +"{\"type\": \"array\", \"items\": \"Node\" }}]}",
           false);
   }
 
   public void testLisp() throws Exception {
-    check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": {"
-          +"\"value\": [\"null\", \"string\","
-          +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": {"
-          +"\"car\": \"Lisp\", \"cdr\": \"Lisp\"}}]}}",
+    check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+          +"{\"name\":\"value\", \"type\":[\"null\", \"string\","
+          +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+          +"{\"name\":\"car\", \"type\":\"Lisp\"},"
+          +"{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
           false);
   }
 

Modified: hadoop/avro/trunk/src/test/py/testio.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/py/testio.py?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/py/testio.py (original)
+++ hadoop/avro/trunk/src/test/py/testio.py Mon Apr 27 20:13:31 2009
@@ -127,24 +127,27 @@
           "\"string\"}")
 
   def testRecord(self):
-    self.check("{\"type\":\"record\",\"fields\":{\"f\":\"string\"," + 
-                "\"fb\":\"bytes\"}}")
+    self.check("{\"type\":\"record\",\"fields\":[{\"name\":\"f\", \"type\":" +
+               "\"string\"}, {\"name\":\"fb\", \"type\":\"bytes\"}]}")
 
   def testRecursive(self):
-    self.check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": {"
-      +"\"label\": \"string\","
-      +"\"children\": {\"type\": \"array\", \"items\": \"Node\" }}}")
+    self.check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+          +"{\"name\":\"label\", \"type\":\"string\"},"
+          +"{\"name\":\"children\", \"type\":"
+          +"{\"type\": \"array\", \"items\": \"Node\" }}]}")
 
   def testLisp(self):
-    self.check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": {"
-      +"\"value\": [\"null\", \"string\","
-      +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": {"
-      +"\"car\": \"Lisp\", \"cdr\": \"Lisp\"}}]}}")
+    self.check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+          +"{\"name\":\"value\", \"type\":[\"null\", \"string\","
+          +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+          +"{\"name\":\"car\", \"type\":\"Lisp\"},"
+          +"{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}")
 
   def testUnion(self):
     self.check("[\"string\", \"null\", \"long\", "
-      +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": {"
-      +"\"car\": \"string\", \"cdr\": \"string\"}}]")
+      +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+      +"{\"name\":\"car\", \"type\":\"string\"}," 
+      +"{\"name\":\"cdr\", \"type\":\"string\"}]}]")
 
   def check(self, string):
     schm = schema.parse(string)

Modified: hadoop/avro/trunk/src/test/py/testioreflect.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/py/testioreflect.py?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/py/testioreflect.py (original)
+++ hadoop/avro/trunk/src/test/py/testioreflect.py Mon Apr 27 20:13:31 2009
@@ -52,8 +52,8 @@
   # the schema MUST have name
   def testRecord(self):
     self.check(
-    "{\"type\":\"record\",\"name\":\"TestRec\",\"fields\":{\"f\":\"string\"," + 
-                "\"fb\":\"bytes\"}}")
+    "{\"type\":\"record\",\"name\":\"TestRec\",\"fields\":[{\"name\":\"f\"," +
+       "\"type\":\"string\"}, {\"name\":\"fb\", \"type\":\"bytes\"}]}")
 
   def __init__(self, methodName):
     testio.TestSchema.__init__(self, methodName, dyvalidator, ReflectDWriter,

Modified: hadoop/avro/trunk/src/test/schemata/fs-data.js
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/schemata/fs-data.js?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/schemata/fs-data.js (original)
+++ hadoop/avro/trunk/src/test/schemata/fs-data.js Mon Apr 27 20:13:31 2009
@@ -7,7 +7,11 @@
 
  "messages": {
      "read": {
-         "request": {"block": "string", "start": "long", "length": "long" },
+         "request": [
+             {"name": "block", "type": "string"},
+             {"name": "start", "type": "long"},
+             {"name": "length", "type": "long"}
+         ],
          "response": "bytes"
      }
 

Modified: hadoop/avro/trunk/src/test/schemata/interop.js
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/schemata/interop.js?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/schemata/interop.js (original)
+++ hadoop/avro/trunk/src/test/schemata/interop.js Mon Apr 27 20:13:31 2009
@@ -1,19 +1,24 @@
 {"type": "record", "name":"Interop", "namespace": "org.apache.avro",
-  "fields": {
-      "intField": "int",
-      "longField": "long",
-      "stringField":"string",
-      "boolField":"boolean",
-      "floatField":"float",
-      "doubleField":"double",
-      "bytesField":"bytes",
-      "nullField":"null",
-      "arrayField":{"type":"array", "items": "double"},
-      "mapField":{"type":"map", "keys": "long", "values": 
-        {"type": "record", "name": "Foo", "fields":{"label": "string"}}},
-      "unionField": ["boolean", "double", {"type":"array","items": "bytes"}],
-      "recordField":{"type": "record", "name": "Node",
-        "fields":{"label": "string","children": {"type":
-        "array", "items": "Node" }}}
-      }
+  "fields": [
+      {"name": "intField", "type": "int"},
+      {"name": "longField", "type": "long"},
+      {"name": "stringField", "type": "string"},
+      {"name": "boolField", "type": "boolean"},
+      {"name": "floatField", "type": "float"},
+      {"name": "doubleField", "type": "double"},
+      {"name": "bytesField", "type": "bytes"},
+      {"name": "nullField", "type": "null"},
+      {"name": "arrayField", "type": {"type": "array", "items": "double"}},
+      {"name": "mapField", "type":
+       {"type": "map", "keys": "long", "values":
+        {"type": "record", "name": "Foo",
+         "fields": [{"name": "label", "type": "string"}]}}},
+      {"name": "unionField", "type":
+       ["boolean", "double", {"type": "array", "items": "bytes"}]},
+      {"name": "recordField", "type":
+       {"type": "record", "name": "Node",
+        "fields": [
+            {"name": "label", "type": "string"},
+            {"name": "children", "type": {"type": "array", "items": "Node"}}]}}
+  ]
 }

Modified: hadoop/avro/trunk/src/test/schemata/test.js
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/schemata/test.js?rev=769139&r1=769138&r2=769139&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/schemata/test.js (original)
+++ hadoop/avro/trunk/src/test/schemata/test.js Mon Apr 27 20:13:31 2009
@@ -3,34 +3,37 @@
 
  "types": [
      {"name": "TestRecord", "type": "record",
-      "fields": {
-          "name": "string"
-      }
+      "fields": [
+          {"name": "name", "type": "string"}
+      ]
      },
-      
-     {"name": "TestError", "type": "error", "fields": {"message": "string"}}
+
+     {"name": "TestError", "type": "error", "fields": [
+         {"name": "message", "type": "string"}
+      ]
+     }
 
  ],
 
  "messages": {
 
      "hello": {
-         "request": {"greeting": "string" },
+         "request": [{"name": "greeting", "type": "string"}],
          "response": "string"
      },
 
      "echo": {
-         "request": {"record": "TestRecord" },
+         "request": [{"name": "record", "type": "TestRecord"}],
          "response": "TestRecord"
      },
 
      "echoBytes": {
-         "request": {"data": "bytes" },
+         "request": [{"name": "data", "type": "bytes"}],
          "response": "bytes"
      },
 
      "error": {
-         "request": {},
+         "request": [],
          "response": "null",
          "errors": ["TestError"]
      }



Mime
View raw message