lucy-commits mailing list archives

From nwelln...@apache.org
Subject lucy git commit: String-only hash keys
Date Wed, 15 Apr 2015 14:06:10 GMT
Repository: lucy
Updated Branches:
  refs/heads/CLOWNFISH-7-string-only-hash-keys [created] 8e232df50


String-only hash keys
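
The gist of the change: Clownfish Hash keys are now always Strings, so callers
pass a String* directly instead of casting up to Obj* on store and CERTIFYing
back down on fetch or iteration. A minimal before/after sketch of the calling
pattern, assuming the Clownfish short names (Hash_*, Str_*, INCREF, DECREF,
CERTIFY) used throughout the Lucy sources below, with hash, field, and value
standing in for any Hash*, String*, and Obj* already in scope:

    /* Before: Obj* keys -- String keys were cast in and certified back out. */
    Hash_Store(hash, (Obj*)field, INCREF(value));
    Obj *key, *val;
    Hash_Iterate(hash);
    while (Hash_Next(hash, &key, &val)) {
        String *key_str = (String*)CERTIFY(key, STRING);
        /* ... use key_str and val ... */
    }

    /* After: String* keys throughout -- no casts, no CERTIFY. */
    Hash_Store(hash, field, INCREF(value));
    String *skey;
    Obj    *sval;
    Hash_Iterate(hash);
    while (Hash_Next(hash, &skey, &sval)) {
        /* ... use skey and sval ... */
    }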


Project: http://git-wip-us.apache.org/repos/asf/lucy/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucy/commit/8e232df5
Tree: http://git-wip-us.apache.org/repos/asf/lucy/tree/8e232df5
Diff: http://git-wip-us.apache.org/repos/asf/lucy/diff/8e232df5

Branch: refs/heads/CLOWNFISH-7-string-only-hash-keys
Commit: 8e232df504bf46544c8a943ab77c9f3ca5237ec5
Parents: 6c78b53
Author: Nick Wellnhofer <wellnhofer@aevum.de>
Authored: Tue Apr 14 16:46:00 2015 +0200
Committer: Nick Wellnhofer <wellnhofer@aevum.de>
Committed: Wed Apr 15 15:54:06 2015 +0200

----------------------------------------------------------------------
 c/src/Lucy/Document/Doc.c                     |  4 +-
 c/src/Lucy/Index/Inverter.c                   |  6 +--
 core/Lucy/Analysis/SnowballStopFilter.c       |  2 +-
 core/Lucy/Index/BackgroundMerger.c            | 16 +++---
 core/Lucy/Index/DeletionsReader.c             |  3 +-
 core/Lucy/Index/DeletionsWriter.c             | 10 ++--
 core/Lucy/Index/DocVector.c                   | 16 +++---
 core/Lucy/Index/FilePurger.c                  | 20 ++++----
 core/Lucy/Index/IndexReader.c                 |  9 ++--
 core/Lucy/Index/Indexer.c                     |  4 +-
 core/Lucy/Index/LexiconWriter.c               |  5 +-
 core/Lucy/Index/PolyReader.c                  | 12 ++---
 core/Lucy/Index/SegLexicon.c                  |  2 +-
 core/Lucy/Index/SegReader.c                   |  6 +--
 core/Lucy/Index/SegWriter.c                   |  6 +--
 core/Lucy/Index/Segment.c                     | 12 ++---
 core/Lucy/Index/Snapshot.c                    |  6 +--
 core/Lucy/Index/SortReader.c                  | 10 ++--
 core/Lucy/Index/SortWriter.c                  |  7 ++-
 core/Lucy/Plan/Schema.c                       | 32 ++++++------
 core/Lucy/Store/CompoundFileReader.c          | 18 +++----
 core/Lucy/Store/CompoundFileWriter.c          |  4 +-
 core/Lucy/Store/FSFolder.c                    | 10 ++--
 core/Lucy/Store/Folder.c                      |  2 +-
 core/Lucy/Store/RAMFolder.c                   | 32 ++++++------
 core/Lucy/Test/Analysis/TestSnowballStemmer.c |  2 +-
 core/Lucy/Test/Store/TestCompoundFileWriter.c |  2 +-
 core/Lucy/Test/Util/TestFreezer.c             |  3 +-
 core/Lucy/Test/Util/TestJson.c                | 18 +------
 core/Lucy/Util/Freezer.c                      | 59 +++++-----------------
 core/Lucy/Util/Json.c                         |  4 +-
 core/Lucy/Util/Json/JsonParser.y              |  8 +--
 go/lucy/index.go                              |  2 +-
 33 files changed, 145 insertions(+), 207 deletions(-)
----------------------------------------------------------------------
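
Beyond the mechanical removal of (Obj*) casts, the one format change in this
diff is Freezer's hash serialization: with keys guaranteed to be Strings, the
separate string-key count and the second pass of FREEZE'd non-String keys are
gone. As implied by Freezer_serialize_hash/Freezer_deserialize_hash below, the
serialized layout reduces to the following (a descriptive sketch, not
authoritative documentation):

    /*
     * C32    size                -- number of key/value pairs
     * size x {
     *     C32    key_len         -- byte length of the UTF-8 key
     *     byte   key[key_len]    -- key bytes only; no per-key class name
     *     ...                    -- FREEZE'd value, THAW'd back on read
     * }
     *
     * The old format additionally wrote a C32 string_count up front and a
     * trailing run of FREEZE'd (key, value) pairs for non-String keys.
     */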


http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/c/src/Lucy/Document/Doc.c
----------------------------------------------------------------------
diff --git a/c/src/Lucy/Document/Doc.c b/c/src/Lucy/Document/Doc.c
index 3b832d5..79f1728 100644
--- a/c/src/Lucy/Document/Doc.c
+++ b/c/src/Lucy/Document/Doc.c
@@ -60,7 +60,7 @@ Doc_Get_Size_IMP(Doc *self) {
 void
 Doc_Store_IMP(Doc *self, String *field, Obj *value) {
     Hash *hash = (Hash*)Doc_IVARS(self)->fields;
-    Hash_Store(hash, (Obj *)field, INCREF(value));
+    Hash_Store(hash, field, INCREF(value));
 }
 
 void
@@ -82,7 +82,7 @@ Doc_Deserialize_IMP(Doc *self, InStream *instream) {
 Obj*
 Doc_Extract_IMP(Doc *self, String *field) {
     Hash *hash = (Hash*)Doc_IVARS(self)->fields;
-    return INCREF(Hash_Fetch(hash, (Obj *)field));
+    return INCREF(Hash_Fetch(hash, field));
 }
 
 void*

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/c/src/Lucy/Index/Inverter.c
----------------------------------------------------------------------
diff --git a/c/src/Lucy/Index/Inverter.c b/c/src/Lucy/Index/Inverter.c
index c235a66..29521d2 100644
--- a/c/src/Lucy/Index/Inverter.c
+++ b/c/src/Lucy/Index/Inverter.c
@@ -69,9 +69,9 @@ Inverter_Invert_Doc_IMP(Inverter *self, Doc *doc) {
 
     // Extract and invert the doc's fields.
     while (num_keys--) {
-        Obj *key, *obj;
-        Hash_Next(fields, &key, &obj);
-        String *field = (String*)CERTIFY(key, STRING);
+        String *field;
+        Obj    *obj;
+        Hash_Next(fields, &field, &obj);
         InverterEntry *inventry = S_fetch_entry(ivars, field);
         InverterEntryIVARS *inventry_ivars = InvEntry_IVARS(inventry);
         FieldType *type = inventry_ivars->type;

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Analysis/SnowballStopFilter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Analysis/SnowballStopFilter.c b/core/Lucy/Analysis/SnowballStopFilter.c
index a35de42..c1fb1c5 100644
--- a/core/Lucy/Analysis/SnowballStopFilter.c
+++ b/core/Lucy/Analysis/SnowballStopFilter.c
@@ -145,7 +145,7 @@ SnowStop_gen_stoplist(String *language) {
     for (uint32_t i = 0; words[i] != NULL; i++) {
         char *word = (char*)words[i];
         String *stop = Str_new_wrap_trusted_utf8(word, strlen(word));
-        Hash_Store(stoplist, (Obj*)stop, (Obj*)CFISH_TRUE);
+        Hash_Store(stoplist, stop, (Obj*)CFISH_TRUE);
         DECREF(stop);
     }
     return (Hash*)stoplist;

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/BackgroundMerger.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/BackgroundMerger.c b/core/Lucy/Index/BackgroundMerger.c
index cfbdfc7..d39480d 100644
--- a/core/Lucy/Index/BackgroundMerger.c
+++ b/core/Lucy/Index/BackgroundMerger.c
@@ -238,7 +238,7 @@ S_maybe_merge(BackgroundMerger *self) {
                                 SegReader_Doc_Max(seg_reader),
                                 (int32_t)doc_count);
 
-        Hash_Store(ivars->doc_maps, (Obj*)seg_name, (Obj*)doc_map);
+        Hash_Store(ivars->doc_maps, seg_name, (Obj*)doc_map);
         SegWriter_Merge_Segment(ivars->seg_writer, seg_reader, doc_map);
         DECREF(deletions);
     }
@@ -263,7 +263,7 @@ S_merge_updated_deletions(BackgroundMerger *self) {
     for (uint32_t i = 0, max = VA_Get_Size(new_seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)VA_Fetch(new_seg_readers, i);
         String    *seg_name   = SegReader_Get_Seg_Name(seg_reader);
-        Hash_Store(new_segs, (Obj*)seg_name, INCREF(seg_reader));
+        Hash_Store(new_segs, seg_name, INCREF(seg_reader));
     }
 
     for (uint32_t i = 0, max = VA_Get_Size(old_seg_readers); i < max; i++) {
@@ -271,10 +271,10 @@ S_merge_updated_deletions(BackgroundMerger *self) {
         String    *seg_name   = SegReader_Get_Seg_Name(seg_reader);
 
         // If this segment was merged away...
-        if (Hash_Fetch(ivars->doc_maps, (Obj*)seg_name)) {
+        if (Hash_Fetch(ivars->doc_maps, seg_name)) {
             SegReader *new_seg_reader
                 = (SegReader*)CERTIFY(
-                      Hash_Fetch(new_segs, (Obj*)seg_name),
+                      Hash_Fetch(new_segs, seg_name),
                       SEGREADER);
             int32_t old_del_count = SegReader_Del_Count(seg_reader);
             int32_t new_del_count = SegReader_Del_Count(new_seg_reader);
@@ -287,7 +287,7 @@ S_merge_updated_deletions(BackgroundMerger *self) {
                 if (!updated_deletions) {
                     updated_deletions = Hash_new(max);
                 }
-                Hash_Store(updated_deletions, (Obj*)seg_name,
+                Hash_Store(updated_deletions, seg_name,
                            (Obj*)DelReader_Iterator(del_reader));
             }
         }
@@ -333,12 +333,10 @@ S_merge_updated_deletions(BackgroundMerger *self) {
         if (offset == INT32_MAX) { THROW(ERR, "Failed sanity check"); }
 
         Hash_Iterate(updated_deletions);
-        while (Hash_Next(updated_deletions,
-                         (Obj**)&seg_name, (Obj**)&deletions)
-              ) {
+        while (Hash_Next(updated_deletions, &seg_name, (Obj**)&deletions)) {
             I32Array *doc_map
                 = (I32Array*)CERTIFY(
-                      Hash_Fetch(ivars->doc_maps, (Obj*)seg_name),
+                      Hash_Fetch(ivars->doc_maps, seg_name),
                       I32ARRAY);
             int32_t del;
             while (0 != (del = Matcher_Next(deletions))) {

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/DeletionsReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DeletionsReader.c b/core/Lucy/Index/DeletionsReader.c
index 8c5beff..08f466e 100644
--- a/core/Lucy/Index/DeletionsReader.c
+++ b/core/Lucy/Index/DeletionsReader.c
@@ -172,8 +172,7 @@ DefDelReader_Read_Deletions_IMP(DefaultDeletionsReader *self) {
         if (metadata) {
             Hash *files = (Hash*)CERTIFY(
                               Hash_Fetch_Utf8(metadata, "files", 5), HASH);
-            Hash *seg_files_data
-                = (Hash*)Hash_Fetch(files, (Obj*)my_seg_name);
+            Hash *seg_files_data = (Hash*)Hash_Fetch(files, my_seg_name);
             if (seg_files_data) {
                 Obj *count = (Obj*)CERTIFY(
                                  Hash_Fetch_Utf8(seg_files_data, "count", 5),

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/DeletionsWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DeletionsWriter.c b/core/Lucy/Index/DeletionsWriter.c
index fcd791f..77e4974 100644
--- a/core/Lucy/Index/DeletionsWriter.c
+++ b/core/Lucy/Index/DeletionsWriter.c
@@ -111,7 +111,7 @@ DefDelWriter_init(DefaultDeletionsWriter *self, Schema *schema,
         }
         VA_Store(ivars->bit_vecs, i, (Obj*)bit_vec);
         Hash_Store(ivars->name_to_tick,
-                   (Obj*)SegReader_Get_Seg_Name(seg_reader),
+                   SegReader_Get_Seg_Name(seg_reader),
                    (Obj*)Int32_new(i));
     }
 
@@ -191,7 +191,7 @@ DefDelWriter_Metadata_IMP(DefaultDeletionsWriter *self) {
                             (Obj*)Str_newf("%u32", (uint32_t)BitVec_Count(deldocs)));
             Hash_Store_Utf8(mini_meta, "filename", 8,
                             (Obj*)S_del_filename(self, seg_reader));
-            Hash_Store(files, (Obj*)Seg_Get_Name(segment), (Obj*)mini_meta);
+            Hash_Store(files, Seg_Get_Name(segment), (Obj*)mini_meta);
         }
     }
     Hash_Store_Utf8(metadata, "files", 5, (Obj*)files);
@@ -213,7 +213,7 @@ DefDelWriter_Seg_Deletions_IMP(DefaultDeletionsWriter *self,
     Segment *segment      = SegReader_Get_Segment(seg_reader);
     String  *seg_name     = Seg_Get_Name(segment);
     Integer32 *tick_obj   = (Integer32*)Hash_Fetch(ivars->name_to_tick,
-                                                   (Obj*)seg_name);
+                                                   seg_name);
     int32_t tick          = tick_obj ? Int32_Get_Value(tick_obj) : 0;
     SegReader *candidate  = tick_obj
                             ? (SegReader*)VA_Fetch(ivars->seg_readers, tick)
@@ -240,7 +240,7 @@ DefDelWriter_Seg_Del_Count_IMP(DefaultDeletionsWriter *self,
                                String *seg_name) {
     DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self);
     Integer32 *tick
-        = (Integer32*)Hash_Fetch(ivars->name_to_tick, (Obj*)seg_name);
+        = (Integer32*)Hash_Fetch(ivars->name_to_tick, seg_name);
     BitVector *deldocs = tick
                          ? (BitVector*)VA_Fetch(ivars->bit_vecs, Int32_Get_Value(tick))
                          : NULL;
@@ -353,7 +353,7 @@ DefDelWriter_Merge_Segment_IMP(DefaultDeletionsWriter *self,
             String *seg;
             Hash *mini_meta;
             Hash_Iterate(files);
-            while (Hash_Next(files, (Obj**)&seg, (Obj**)&mini_meta)) {
+            while (Hash_Next(files, &seg, (Obj**)&mini_meta)) {
 
                 /* Find the segment the deletions from the SegReader
                  * we're adding correspond to.  If it's gone, we don't

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/DocVector.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DocVector.c b/core/Lucy/Index/DocVector.c
index 0cb0aae..9e7fa72 100644
--- a/core/Lucy/Index/DocVector.c
+++ b/core/Lucy/Index/DocVector.c
@@ -76,13 +76,13 @@ void
 DocVec_Add_Field_Buf_IMP(DocVector *self, String *field,
                          ByteBuf *field_buf) {
     DocVectorIVARS *const ivars = DocVec_IVARS(self);
-    Hash_Store(ivars->field_bufs, (Obj*)field, INCREF(field_buf));
+    Hash_Store(ivars->field_bufs, field, INCREF(field_buf));
 }
 
 ByteBuf*
 DocVec_Field_Buf_IMP(DocVector *self, String *field) {
     DocVectorIVARS *const ivars = DocVec_IVARS(self);
-    return (ByteBuf*)Hash_Fetch(ivars->field_bufs, (Obj*)field);
+    return (ByteBuf*)Hash_Fetch(ivars->field_bufs, field);
 }
 
 VArray*
@@ -95,22 +95,21 @@ TermVector*
 DocVec_Term_Vector_IMP(DocVector *self, String *field,
                        String *term_text) {
     DocVectorIVARS *const ivars = DocVec_IVARS(self);
-    Hash *field_vector = (Hash*)Hash_Fetch(ivars->field_vectors, (Obj*)field);
+    Hash *field_vector = (Hash*)Hash_Fetch(ivars->field_vectors, field);
 
     // If no cache hit, try to fill cache.
     if (field_vector == NULL) {
-        ByteBuf *field_buf
-            = (ByteBuf*)Hash_Fetch(ivars->field_bufs, (Obj*)field);
+        ByteBuf *field_buf = (ByteBuf*)Hash_Fetch(ivars->field_bufs, field);
 
         // Bail if there's no content or the field isn't highlightable.
         if (field_buf == NULL) { return NULL; }
 
         field_vector = S_extract_tv_cache(field_buf);
-        Hash_Store(ivars->field_vectors, (Obj*)field, (Obj*)field_vector);
+        Hash_Store(ivars->field_vectors, field, (Obj*)field_vector);
     }
 
     // Get a buf for the term text or bail.
-    ByteBuf *tv_buf = (ByteBuf*)Hash_Fetch(field_vector, (Obj*)term_text);
+    ByteBuf *tv_buf = (ByteBuf*)Hash_Fetch(field_vector, term_text);
     if (tv_buf == NULL) {
         return NULL;
     }
@@ -148,8 +147,7 @@ S_extract_tv_cache(ByteBuf *field_buf) {
 
         // Store the $text => $posdata pair in the output hash.
         String *text = CB_To_String(text_buf);
-        Hash_Store(tv_cache, (Obj*)text,
-                   (Obj*)BB_new_bytes(bookmark_ptr, len));
+        Hash_Store(tv_cache, text, (Obj*)BB_new_bytes(bookmark_ptr, len));
         DECREF(text);
     }
     DECREF(text_buf);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/FilePurger.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/FilePurger.c b/core/Lucy/Index/FilePurger.c
index c283d5e..9cc3d8c 100644
--- a/core/Lucy/Index/FilePurger.c
+++ b/core/Lucy/Index/FilePurger.c
@@ -96,10 +96,10 @@ FilePurger_Purge_IMP(FilePurger *self) {
         VA_Sort(purgables, NULL, NULL);
         for (uint32_t i = VA_Get_Size(purgables); i--;) {
             String *entry = (String*)VA_Fetch(purgables, i);
-            if (Hash_Fetch(ivars->disallowed, (Obj*)entry)) { continue; }
+            if (Hash_Fetch(ivars->disallowed, entry)) { continue; }
             if (!Folder_Delete(folder, entry)) {
                 if (Folder_Exists(folder, entry)) {
-                    Hash_Store(failures, (Obj*)entry, (Obj*)CFISH_TRUE);
+                    Hash_Store(failures, entry, (Obj*)CFISH_TRUE);
                 }
             }
         }
@@ -113,7 +113,7 @@ FilePurger_Purge_IMP(FilePurger *self) {
                 VArray *entries = Snapshot_List(snapshot);
                 for (uint32_t j = VA_Get_Size(entries); j--;) {
                     String *entry = (String*)VA_Fetch(entries, j);
-                    if (Hash_Fetch(failures, (Obj*)entry)) {
+                    if (Hash_Fetch(failures, entry)) {
                         snapshot_has_failures = true;
                         break;
                     }
@@ -162,13 +162,13 @@ S_zap_dead_merge(FilePurger *self, Hash *candidates) {
                     THROW(ERR, "Can't open segment dir '%o'", cutoff_seg);
                 }
 
-                Hash_Store(candidates, (Obj*)cutoff_seg, (Obj*)CFISH_TRUE);
-                Hash_Store(candidates, (Obj*)merge_json, (Obj*)CFISH_TRUE);
+                Hash_Store(candidates, cutoff_seg, (Obj*)CFISH_TRUE);
+                Hash_Store(candidates, (String*)merge_json, (Obj*)CFISH_TRUE);
                 while (DH_Next(dh)) {
                     // TODO: recursively delete subdirs within seg dir.
                     String *entry = DH_Get_Entry(dh);
                     String *filepath = Str_newf("%o/%o", cutoff_seg, entry);
-                    Hash_Store(candidates, (Obj*)filepath, (Obj*)CFISH_TRUE);
+                    Hash_Store(candidates, filepath, (Obj*)CFISH_TRUE);
                     DECREF(filepath);
                     DECREF(entry);
                 }
@@ -240,7 +240,7 @@ S_discover_unused(FilePurger *self, VArray **purgables_ptr,
                 // candidates for deletion.
                 for (uint32_t i = 0, max = VA_Get_Size(referenced); i < max; i++) {
                     String *file = (String*)VA_Fetch(referenced, i);
-                    Hash_Store(candidates, (Obj*)file, (Obj*)CFISH_TRUE);
+                    Hash_Store(candidates, file, (Obj*)CFISH_TRUE);
                 }
                 VA_Push(snapshots, INCREF(snapshot));
             }
@@ -260,7 +260,7 @@ S_discover_unused(FilePurger *self, VArray **purgables_ptr,
     // Eliminate any current files from the list of files to be purged.
     for (uint32_t i = 0, max = VA_Get_Size(spared); i < max; i++) {
         String *filename = (String*)VA_Fetch(spared, i);
-        DECREF(Hash_Delete(candidates, (Obj*)filename));
+        DECREF(Hash_Delete(candidates, filename));
     }
 
     // Pass back purgables and Snapshots.
@@ -276,12 +276,12 @@ S_find_all_referenced(Folder *folder, VArray *entries) {
     Hash *uniqued = Hash_new(VA_Get_Size(entries));
     for (uint32_t i = 0, max = VA_Get_Size(entries); i < max; i++) {
         String *entry = (String*)VA_Fetch(entries, i);
-        Hash_Store(uniqued, (Obj*)entry, (Obj*)CFISH_TRUE);
+        Hash_Store(uniqued, entry, (Obj*)CFISH_TRUE);
         if (Folder_Is_Directory(folder, entry)) {
             VArray *contents = Folder_List_R(folder, entry);
             for (uint32_t j = VA_Get_Size(contents); j--;) {
                 String *sub_entry = (String*)VA_Fetch(contents, j);
-                Hash_Store(uniqued, (Obj*)sub_entry, (Obj*)CFISH_TRUE);
+                Hash_Store(uniqued, sub_entry, (Obj*)CFISH_TRUE);
             }
             DECREF(contents);
         }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/IndexReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/IndexReader.c b/core/Lucy/Index/IndexReader.c
index e8d8c65..459bef9 100644
--- a/core/Lucy/Index/IndexReader.c
+++ b/core/Lucy/Index/IndexReader.c
@@ -73,9 +73,7 @@ IxReader_Close_IMP(IndexReader *self) {
         String *key;
         DataReader *component;
         Hash_Iterate(ivars->components);
-        while (Hash_Next(ivars->components, (Obj**)&key,
-                         (Obj**)&component)
-              ) {
+        while (Hash_Next(ivars->components, &key, (Obj**)&component)) {
             if (Obj_Is_A((Obj*)component, DATAREADER)) {
                 DataReader_Close(component);
             }
@@ -110,8 +108,7 @@ IxReader_Get_Components_IMP(IndexReader *self) {
 DataReader*
 IxReader_Obtain_IMP(IndexReader *self, String *api) {
     IndexReaderIVARS *const ivars = IxReader_IVARS(self);
-    DataReader *component
-        = (DataReader*)Hash_Fetch(ivars->components, (Obj*)api);
+    DataReader *component = (DataReader*)Hash_Fetch(ivars->components, api);
     if (!component) {
         THROW(ERR, "No component registered for '%o'", api);
     }
@@ -121,7 +118,7 @@ IxReader_Obtain_IMP(IndexReader *self, String *api) {
 DataReader*
 IxReader_Fetch_IMP(IndexReader *self, String *api) {
     IndexReaderIVARS *const ivars = IxReader_IVARS(self);
-    return (DataReader*)Hash_Fetch(ivars->components, (Obj*)api);
+    return (DataReader*)Hash_Fetch(ivars->components, api);
 }
 
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/Indexer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Indexer.c b/core/Lucy/Index/Indexer.c
index 7db0001..8840bbe 100644
--- a/core/Lucy/Index/Indexer.c
+++ b/core/Lucy/Index/Indexer.c
@@ -437,13 +437,13 @@ S_maybe_merge(Indexer *self, VArray *seg_readers) {
         SegReader *seg_reader
             = (SegReader*)CERTIFY(VA_Fetch(to_merge, i), SEGREADER);
         String *seg_name = SegReader_Get_Seg_Name(seg_reader);
-        if (Hash_Fetch(seen, (Obj*)seg_name)) {
+        if (Hash_Fetch(seen, seg_name)) {
             DECREF(seen);
             DECREF(to_merge);
             THROW(ERR, "Recycle() tried to merge segment '%o' twice",
                   seg_name);
         }
-        Hash_Store(seen, (Obj*)seg_name, (Obj*)CFISH_TRUE);
+        Hash_Store(seen, seg_name, (Obj*)CFISH_TRUE);
     }
     DECREF(seen);
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/LexiconWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconWriter.c b/core/Lucy/Index/LexiconWriter.c
index f74776f..1ef32f1 100644
--- a/core/Lucy/Index/LexiconWriter.c
+++ b/core/Lucy/Index/LexiconWriter.c
@@ -160,9 +160,8 @@ LexWriter_Finish_Field_IMP(LexiconWriter *self, int32_t field_num) {
     String *field = Seg_Field_Name(ivars->segment, field_num);
 
     // Store count of terms for this field as metadata.
-    Hash_Store(ivars->counts, (Obj*)field,
-               (Obj*)Str_newf("%i32", ivars->count));
-    Hash_Store(ivars->ix_counts, (Obj*)field,
+    Hash_Store(ivars->counts, field, (Obj*)Str_newf("%i32", ivars->count));
+    Hash_Store(ivars->ix_counts, field,
                (Obj*)Str_newf("%i32", ivars->ix_count));
 
     // Close streams.

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/PolyReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyReader.c b/core/Lucy/Index/PolyReader.c
index 023035e..e941a61 100644
--- a/core/Lucy/Index/PolyReader.c
+++ b/core/Lucy/Index/PolyReader.c
@@ -120,11 +120,11 @@ S_init_sub_readers(PolyReader *self, VArray *sub_readers) {
         starts[i] = ivars->doc_max;
         ivars->doc_max += SegReader_Doc_Max(seg_reader);
         Hash_Iterate(components);
-        while (Hash_Next(components, (Obj**)&api, (Obj**)&component)) {
-            VArray *readers = (VArray*)Hash_Fetch(data_readers, (Obj*)api);
+        while (Hash_Next(components, &api, (Obj**)&component)) {
+            VArray *readers = (VArray*)Hash_Fetch(data_readers, api);
             if (!readers) {
                 readers = VA_new(num_sub_readers);
-                Hash_Store(data_readers, (Obj*)api, (Obj*)readers);
+                Hash_Store(data_readers, api, (Obj*)readers);
             }
             VA_Store(readers, i, INCREF(component));
         }
@@ -134,21 +134,21 @@ S_init_sub_readers(PolyReader *self, VArray *sub_readers) {
     String *api;
     VArray *readers;
     Hash_Iterate(data_readers);
-    while (Hash_Next(data_readers, (Obj**)&api, (Obj**)&readers)) {
+    while (Hash_Next(data_readers, &api, (Obj**)&readers)) {
         DataReader *datareader
             = (DataReader*)CERTIFY(S_first_non_null(readers), DATAREADER);
         DataReader *aggregator
             = DataReader_Aggregator(datareader, readers, ivars->offsets);
         if (aggregator) {
             CERTIFY(aggregator, DATAREADER);
-            Hash_Store(ivars->components, (Obj*)api, (Obj*)aggregator);
+            Hash_Store(ivars->components, api, (Obj*)aggregator);
         }
     }
     DECREF(data_readers);
 
     DeletionsReader *del_reader
         = (DeletionsReader*)Hash_Fetch(
-              ivars->components, (Obj*)Class_Get_Name(DELETIONSREADER));
+              ivars->components, Class_Get_Name(DELETIONSREADER));
     ivars->del_count = del_reader ? DelReader_Del_Count(del_reader) : 0;
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/SegLexicon.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegLexicon.c b/core/Lucy/Index/SegLexicon.c
index 31cf2bd..c30bf06 100644
--- a/core/Lucy/Index/SegLexicon.c
+++ b/core/Lucy/Index/SegLexicon.c
@@ -72,7 +72,7 @@ SegLex_init(SegLexicon *self, Schema *schema, Folder *folder,
     // Extract count from metadata.
     if (!counts) { THROW(ERR, "Failed to extract 'counts'"); }
     else {
-        Obj *count = CERTIFY(Hash_Fetch(counts, (Obj*)field), OBJ);
+        Obj *count = CERTIFY(Hash_Fetch(counts, field), OBJ);
         ivars->size = (int32_t)Obj_To_I64(count);
     }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/SegReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegReader.c b/core/Lucy/Index/SegReader.c
index 4ebcb76..823aa27 100644
--- a/core/Lucy/Index/SegReader.c
+++ b/core/Lucy/Index/SegReader.c
@@ -62,7 +62,7 @@ SegReader_init(SegReader *self, Schema *schema, Folder *folder,
 
     DeletionsReader *del_reader
         = (DeletionsReader*)Hash_Fetch(
-              ivars->components, (Obj*)Class_Get_Name(DELETIONSREADER));
+              ivars->components, Class_Get_Name(DELETIONSREADER));
     ivars->del_count = del_reader ? DelReader_Del_Count(del_reader) : 0;
 
     return self;
@@ -87,11 +87,11 @@ void
 SegReader_Register_IMP(SegReader *self, String *api,
                        DataReader *component) {
     SegReaderIVARS *const ivars = SegReader_IVARS(self);
-    if (Hash_Fetch(ivars->components, (Obj*)api)) {
+    if (Hash_Fetch(ivars->components, api)) {
         THROW(ERR, "Interface '%o' already registered");
     }
     CERTIFY(component, DATAREADER);
-    Hash_Store(ivars->components, (Obj*)api, (Obj*)component);
+    Hash_Store(ivars->components, api, (Obj*)component);
 }
 
 String*

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/SegWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegWriter.c b/core/Lucy/Index/SegWriter.c
index 6d360ff..c557e3d 100644
--- a/core/Lucy/Index/SegWriter.c
+++ b/core/Lucy/Index/SegWriter.c
@@ -65,16 +65,16 @@ SegWriter_Register_IMP(SegWriter *self, String *api,
                        DataWriter *component) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     CERTIFY(component, DATAWRITER);
-    if (Hash_Fetch(ivars->by_api, (Obj*)api)) {
+    if (Hash_Fetch(ivars->by_api, api)) {
         THROW(ERR, "API %o already registered", api);
     }
-    Hash_Store(ivars->by_api, (Obj*)api, (Obj*)component);
+    Hash_Store(ivars->by_api, api, (Obj*)component);
 }
 
 Obj*
 SegWriter_Fetch_IMP(SegWriter *self, String *api) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
-    return Hash_Fetch(ivars->by_api, (Obj*)api);
+    return Hash_Fetch(ivars->by_api, api);
 }
 
 void

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/Segment.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Segment.c b/core/Lucy/Index/Segment.c
index 785387a..4d6f203 100644
--- a/core/Lucy/Index/Segment.c
+++ b/core/Lucy/Index/Segment.c
@@ -160,13 +160,13 @@ Seg_Write_File_IMP(Segment *self, Folder *folder) {
 int32_t
 Seg_Add_Field_IMP(Segment *self, String *field) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
-    Integer32 *num = (Integer32*)Hash_Fetch(ivars->by_name, (Obj*)field);
+    Integer32 *num = (Integer32*)Hash_Fetch(ivars->by_name, field);
     if (num) {
         return Int32_Get_Value(num);
     }
     else {
         int32_t field_num = VA_Get_Size(ivars->by_num);
-        Hash_Store(ivars->by_name, (Obj*)field, (Obj*)Int32_new(field_num));
+        Hash_Store(ivars->by_name, field, (Obj*)Int32_new(field_num));
         VA_Push(ivars->by_num, (Obj*)Str_Clone(field));
         return field_num;
     }
@@ -202,10 +202,10 @@ Seg_Increment_Count_IMP(Segment *self, int64_t increment) {
 void
 Seg_Store_Metadata_IMP(Segment *self, String *key, Obj *value) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
-    if (Hash_Fetch(ivars->metadata, (Obj*)key)) {
+    if (Hash_Fetch(ivars->metadata, key)) {
         THROW(ERR, "Metadata key '%o' already registered", key);
     }
-    Hash_Store(ivars->metadata, (Obj*)key, value);
+    Hash_Store(ivars->metadata, key, value);
 }
 
 void
@@ -218,7 +218,7 @@ Seg_Store_Metadata_Utf8_IMP(Segment *self, const char *key, size_t key_len,
 Obj*
 Seg_Fetch_Metadata_IMP(Segment *self, String *key) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
-    return Hash_Fetch(ivars->metadata, (Obj*)key);
+    return Hash_Fetch(ivars->metadata, key);
 }
 
 Obj*
@@ -257,7 +257,7 @@ Seg_Field_Num_IMP(Segment *self, String *field) {
     }
     else {
         SegmentIVARS *const ivars = Seg_IVARS(self);
-        Integer32 *num = (Integer32*)Hash_Fetch(ivars->by_name, (Obj*)field);
+        Integer32 *num = (Integer32*)Hash_Fetch(ivars->by_name, field);
         return num ? Int32_Get_Value(num) : 0;
     }
 }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/Snapshot.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Snapshot.c b/core/Lucy/Index/Snapshot.c
index d004eb1..fa4739a 100644
--- a/core/Lucy/Index/Snapshot.c
+++ b/core/Lucy/Index/Snapshot.c
@@ -62,13 +62,13 @@ Snapshot_Destroy_IMP(Snapshot *self) {
 void
 Snapshot_Add_Entry_IMP(Snapshot *self, String *entry) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
-    Hash_Store(ivars->entries, (Obj*)entry, (Obj*)CFISH_TRUE);
+    Hash_Store(ivars->entries, entry, (Obj*)CFISH_TRUE);
 }
 
 bool
 Snapshot_Delete_Entry_IMP(Snapshot *self, String *entry) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
-    Obj *val = Hash_Delete(ivars->entries, (Obj*)entry);
+    Obj *val = Hash_Delete(ivars->entries, entry);
     if (val) {
         DECREF(val);
         return true;
@@ -140,7 +140,7 @@ Snapshot_Read_File_IMP(Snapshot *self, Folder *folder, String *path) {
         for (uint32_t i = 0, max = VA_Get_Size(list); i < max; i++) {
             String *entry
                 = (String*)CERTIFY(VA_Fetch(list, i), STRING);
-            Hash_Store(ivars->entries, (Obj*)entry, (Obj*)CFISH_TRUE);
+            Hash_Store(ivars->entries, entry, (Obj*)CFISH_TRUE);
         }
 
         DECREF(list);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/SortReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortReader.c b/core/Lucy/Index/SortReader.c
index 6bff395..c438e6c 100644
--- a/core/Lucy/Index/SortReader.c
+++ b/core/Lucy/Index/SortReader.c
@@ -159,7 +159,7 @@ S_lazy_init_sort_cache(DefaultSortReader *self, String *field) {
     DefaultSortReaderIVARS *const ivars = DefSortReader_IVARS(self);
 
     // See if we have any values.
-    Obj *count_obj = Hash_Fetch(ivars->counts, (Obj*)field);
+    Obj *count_obj = Hash_Fetch(ivars->counts, field);
     int32_t count = count_obj ? (int32_t)Obj_To_I64(count_obj) : 0;
     if (!count) { return NULL; }
 
@@ -204,9 +204,9 @@ S_lazy_init_sort_cache(DefaultSortReader *self, String *field) {
               field, Err_get_error());
     }
 
-    Obj     *null_ord_obj = Hash_Fetch(ivars->null_ords, (Obj*)field);
+    Obj     *null_ord_obj = Hash_Fetch(ivars->null_ords, field);
     int32_t  null_ord = null_ord_obj ? (int32_t)Obj_To_I64(null_ord_obj) : -1;
-    Obj     *ord_width_obj = Hash_Fetch(ivars->ord_widths, (Obj*)field);
+    Obj     *ord_width_obj = Hash_Fetch(ivars->ord_widths, field);
     int32_t  ord_width = ord_width_obj
                          ? (int32_t)Obj_To_I64(ord_width_obj)
                          : S_calc_ord_width(count);
@@ -242,7 +242,7 @@ S_lazy_init_sort_cache(DefaultSortReader *self, String *field) {
         default:
             THROW(ERR, "No SortCache class for %o", type);
     }
-    Hash_Store(ivars->caches, (Obj*)field, (Obj*)cache);
+    Hash_Store(ivars->caches, field, (Obj*)cache);
 
     if (ivars->format == 2) { // bug compatibility
         SortCache_Set_Native_Ords(cache, true);
@@ -262,7 +262,7 @@ DefSortReader_Fetch_Sort_Cache_IMP(DefaultSortReader *self,
 
     if (field) {
         DefaultSortReaderIVARS *const ivars = DefSortReader_IVARS(self);
-        cache = (SortCache*)Hash_Fetch(ivars->caches, (Obj*)field);
+        cache = (SortCache*)Hash_Fetch(ivars->caches, field);
         if (!cache) {
             cache = S_lazy_init_sort_cache(self, field);
         }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Index/SortWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortWriter.c b/core/Lucy/Index/SortWriter.c
index 334b852..f2d18de 100644
--- a/core/Lucy/Index/SortWriter.c
+++ b/core/Lucy/Index/SortWriter.c
@@ -220,15 +220,14 @@ SortWriter_Finish_IMP(SortWriter *self) {
             String *field = Seg_Field_Name(ivars->segment, i);
             SortFieldWriter_Flip(field_writer);
             int32_t count = SortFieldWriter_Finish(field_writer);
-            Hash_Store(ivars->counts, (Obj*)field,
-                       (Obj*)Str_newf("%i32", count));
+            Hash_Store(ivars->counts, field, (Obj*)Str_newf("%i32", count));
             int32_t null_ord = SortFieldWriter_Get_Null_Ord(field_writer);
             if (null_ord != -1) {
-                Hash_Store(ivars->null_ords, (Obj*)field,
+                Hash_Store(ivars->null_ords, field,
                            (Obj*)Str_newf("%i32", null_ord));
             }
             int32_t ord_width = SortFieldWriter_Get_Ord_Width(field_writer);
-            Hash_Store(ivars->ord_widths, (Obj*)field,
+            Hash_Store(ivars->ord_widths, field,
                        (Obj*)Str_newf("%i32", ord_width));
         }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Plan/Schema.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Plan/Schema.c b/core/Lucy/Plan/Schema.c
index 8e778bf..0bf9842 100644
--- a/core/Lucy/Plan/Schema.c
+++ b/core/Lucy/Plan/Schema.c
@@ -148,12 +148,12 @@ S_add_text_field(Schema *self, String *field, FieldType *type) {
     Analyzer     *analyzer  = FullTextType_Get_Analyzer(fttype);
 
     // Cache helpers.
-    Hash_Store(ivars->sims, (Obj*)field, (Obj*)sim);
-    Hash_Store(ivars->analyzers, (Obj*)field, INCREF(analyzer));
+    Hash_Store(ivars->sims, field, (Obj*)sim);
+    Hash_Store(ivars->analyzers, field, INCREF(analyzer));
     S_add_unique(ivars->uniq_analyzers, (Obj*)analyzer);
 
     // Store FieldType.
-    Hash_Store(ivars->types, (Obj*)field, INCREF(type));
+    Hash_Store(ivars->types, field, INCREF(type));
 }
 
 static void
@@ -163,37 +163,37 @@ S_add_string_field(Schema *self, String *field, FieldType *type) {
     Similarity *sim         = StringType_Make_Similarity(string_type);
 
     // Cache helpers.
-    Hash_Store(ivars->sims, (Obj*)field, (Obj*)sim);
+    Hash_Store(ivars->sims, field, (Obj*)sim);
 
     // Store FieldType.
-    Hash_Store(ivars->types, (Obj*)field, INCREF(type));
+    Hash_Store(ivars->types, field, INCREF(type));
 }
 
 static void
 S_add_blob_field(Schema *self, String *field, FieldType *type) {
     SchemaIVARS *const ivars = Schema_IVARS(self);
     BlobType *blob_type = (BlobType*)CERTIFY(type, BLOBTYPE);
-    Hash_Store(ivars->types, (Obj*)field, INCREF(blob_type));
+    Hash_Store(ivars->types, field, INCREF(blob_type));
 }
 
 static void
 S_add_numeric_field(Schema *self, String *field, FieldType *type) {
     SchemaIVARS *const ivars = Schema_IVARS(self);
     NumericType *num_type = (NumericType*)CERTIFY(type, NUMERICTYPE);
-    Hash_Store(ivars->types, (Obj*)field, INCREF(num_type));
+    Hash_Store(ivars->types, field, INCREF(num_type));
 }
 
 FieldType*
 Schema_Fetch_Type_IMP(Schema *self, String *field) {
     SchemaIVARS *const ivars = Schema_IVARS(self);
-    return (FieldType*)Hash_Fetch(ivars->types, (Obj*)field);
+    return (FieldType*)Hash_Fetch(ivars->types, field);
 }
 
 Analyzer*
 Schema_Fetch_Analyzer_IMP(Schema *self, String *field) {
     SchemaIVARS *const ivars = Schema_IVARS(self);
     return field
-           ? (Analyzer*)Hash_Fetch(ivars->analyzers, (Obj*)field)
+           ? (Analyzer*)Hash_Fetch(ivars->analyzers, field)
            : NULL;
 }
 
@@ -202,7 +202,7 @@ Schema_Fetch_Sim_IMP(Schema *self, String *field) {
     SchemaIVARS *const ivars = Schema_IVARS(self);
     Similarity *sim = NULL;
     if (field != NULL) {
-        sim = (Similarity*)Hash_Fetch(ivars->sims, (Obj*)field);
+        sim = (Similarity*)Hash_Fetch(ivars->sims, field);
     }
     return sim;
 }
@@ -264,7 +264,7 @@ Schema_Dump_IMP(Schema *self) {
     // Dump FieldTypes.
     Hash_Store_Utf8(dump, "fields", 6, (Obj*)type_dumps);
     Hash_Iterate(ivars->types);
-    while (Hash_Next(ivars->types, (Obj**)&field, (Obj**)&type)) {
+    while (Hash_Next(ivars->types, &field, (Obj**)&type)) {
         Class *type_class = FType_Get_Class(type);
 
         // Dump known types to simplified format.
@@ -279,15 +279,15 @@ Schema_Dump_IMP(Schema *self) {
             Hash_Store_Utf8(type_dump, "analyzer", 8,
                             (Obj*)Str_newf("%u32", tick));
 
-            Hash_Store(type_dumps, (Obj*)field, (Obj*)type_dump);
+            Hash_Store(type_dumps, field, (Obj*)type_dump);
         }
         else if (type_class == STRINGTYPE || type_class == BLOBTYPE) {
             Hash *type_dump = FType_Dump_For_Schema(type);
-            Hash_Store(type_dumps, (Obj*)field, (Obj*)type_dump);
+            Hash_Store(type_dumps, field, (Obj*)type_dump);
         }
         // Unknown FieldType type, so punt.
         else {
-            Hash_Store(type_dumps, (Obj*)field, FType_Dump(type));
+            Hash_Store(type_dumps, field, FType_Dump(type));
         }
     }
 
@@ -325,7 +325,7 @@ Schema_Load_IMP(Schema *self, Obj *dump) {
     VA_Grow(loaded_ivars->uniq_analyzers, VA_Get_Size(analyzers));
 
     Hash_Iterate(type_dumps);
-    while (Hash_Next(type_dumps, (Obj**)&field, (Obj**)&type_dump)) {
+    while (Hash_Next(type_dumps, &field, (Obj**)&type_dump)) {
         String *type_str;
         CERTIFY(type_dump, HASH);
         type_str = (String*)Hash_Fetch_Utf8(type_dump, "type", 4);
@@ -412,7 +412,7 @@ Schema_Eat_IMP(Schema *self, Schema *other) {
     FieldType *type;
     SchemaIVARS *const ovars = Schema_IVARS(other);
     Hash_Iterate(ovars->types);
-    while (Hash_Next(ovars->types, (Obj**)&field, (Obj**)&type)) {
+    while (Hash_Next(ovars->types, &field, (Obj**)&type)) {
         Schema_Spec_Field(self, field, type);
     }
 }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Store/CompoundFileReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/CompoundFileReader.c b/core/Lucy/Store/CompoundFileReader.c
index afc8cd4..556c14b 100644
--- a/core/Lucy/Store/CompoundFileReader.c
+++ b/core/Lucy/Store/CompoundFileReader.c
@@ -95,11 +95,11 @@ CFReader_do_open(CompoundFileReader *self, Folder *folder) {
         for (uint32_t i = 0, max = VA_Get_Size(files); i < max; i++) {
             String *orig = (String*)VA_Fetch(files, i);
             if (Str_Starts_With(orig, folder_name)) {
-                Obj *record = Hash_Delete(ivars->records, (Obj*)orig);
+                Obj *record = Hash_Delete(ivars->records, orig);
                 size_t offset = folder_name_len + sizeof(CHY_DIR_SEP) - 1;
                 size_t len    = Str_Length(orig) - offset;
                 String *filename = Str_SubString(orig, offset, len);
-                Hash_Store(ivars->records, (Obj*)filename, (Obj*)record);
+                Hash_Store(ivars->records, filename, (Obj*)record);
                 DECREF(filename);
             }
         }
@@ -139,7 +139,7 @@ FileHandle*
 CFReader_Local_Open_FileHandle_IMP(CompoundFileReader *self,
                                    String *name, uint32_t flags) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    Hash *entry = (Hash*)Hash_Fetch(ivars->records, (Obj*)name);
+    Hash *entry = (Hash*)Hash_Fetch(ivars->records, name);
     FileHandle *fh = NULL;
 
     if (entry) {
@@ -159,7 +159,7 @@ CFReader_Local_Open_FileHandle_IMP(CompoundFileReader *self,
 bool
 CFReader_Local_Delete_IMP(CompoundFileReader *self, String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    Hash *record = (Hash*)Hash_Delete(ivars->records, (Obj*)name);
+    Hash *record = (Hash*)Hash_Delete(ivars->records, name);
     DECREF(record);
 
     if (record == NULL) {
@@ -186,7 +186,7 @@ CFReader_Local_Delete_IMP(CompoundFileReader *self, String *name) {
 InStream*
 CFReader_Local_Open_In_IMP(CompoundFileReader *self, String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    Hash *entry = (Hash*)Hash_Fetch(ivars->records, (Obj*)name);
+    Hash *entry = (Hash*)Hash_Fetch(ivars->records, name);
 
     if (!entry) {
         InStream *instream = Folder_Local_Open_In(ivars->real_folder, name);
@@ -220,7 +220,7 @@ CFReader_Local_Open_In_IMP(CompoundFileReader *self, String *name) {
 bool
 CFReader_Local_Exists_IMP(CompoundFileReader *self, String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    if (Hash_Fetch(ivars->records, (Obj*)name))        { return true; }
+    if (Hash_Fetch(ivars->records, name))              { return true; }
     if (Folder_Local_Exists(ivars->real_folder, name)) { return true; }
     return false;
 }
@@ -229,7 +229,7 @@ bool
 CFReader_Local_Is_Directory_IMP(CompoundFileReader *self,
                                 String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    if (Hash_Fetch(ivars->records, (Obj*)name))              { return false; }
+    if (Hash_Fetch(ivars->records, name))                    { return false; }
     if (Folder_Local_Is_Directory(ivars->real_folder, name)) { return true; }
     return false;
 }
@@ -243,7 +243,7 @@ CFReader_Close_IMP(CompoundFileReader *self) {
 bool
 CFReader_Local_MkDir_IMP(CompoundFileReader *self, String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    if (Hash_Fetch(ivars->records, (Obj*)name)) {
+    if (Hash_Fetch(ivars->records, name)) {
         Err_set_error(Err_new(Str_newf("Can't MkDir: '%o' exists", name)));
         return false;
     }
@@ -258,7 +258,7 @@ Folder*
 CFReader_Local_Find_Folder_IMP(CompoundFileReader *self,
                                String *name) {
     CompoundFileReaderIVARS *const ivars = CFReader_IVARS(self);
-    if (Hash_Fetch(ivars->records, (Obj*)name)) { return false; }
+    if (Hash_Fetch(ivars->records, name)) { return false; }
     return Folder_Local_Find_Folder(ivars->real_folder, name);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Store/CompoundFileWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/CompoundFileWriter.c b/core/Lucy/Store/CompoundFileWriter.c
index 025ef65..b3b137e 100644
--- a/core/Lucy/Store/CompoundFileWriter.c
+++ b/core/Lucy/Store/CompoundFileWriter.c
@@ -130,7 +130,7 @@ S_do_consolidate(CompoundFileWriter *self, CompoundFileWriterIVARS *ivars) {
                             (Obj*)Str_newf("%i64", offset));
             Hash_Store_Utf8(file_data, "length", 6,
                             (Obj*)Str_newf("%i64", len));
-            Hash_Store(sub_files, (Obj*)infilename, (Obj*)file_data);
+            Hash_Store(sub_files, infilename, (Obj*)file_data);
             VA_Push(merged, INCREF(infilename));
 
             // Add filler NULL bytes so that every sub-file begins on a file
@@ -158,7 +158,7 @@ S_do_consolidate(CompoundFileWriter *self, CompoundFileWriterIVARS *ivars) {
     String *merged_file;
     Obj    *ignore;
     Hash_Iterate(sub_files);
-    while (Hash_Next(sub_files, (Obj**)&merged_file, &ignore)) {
+    while (Hash_Next(sub_files, &merged_file, &ignore)) {
         if (!Folder_Delete(folder, merged_file)) {
             String *mess = MAKE_MESS("Can't delete '%o'", merged_file);
             DECREF(sub_files);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Store/FSFolder.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/FSFolder.c b/core/Lucy/Store/FSFolder.c
index 503a22f..a02979d 100644
--- a/core/Lucy/Store/FSFolder.c
+++ b/core/Lucy/Store/FSFolder.c
@@ -140,7 +140,7 @@ FSFolder_Local_Open_Dir_IMP(FSFolder *self) {
 bool
 FSFolder_Local_Exists_IMP(FSFolder *self, String *name) {
     FSFolderIVARS *const ivars = FSFolder_IVARS(self);
-    if (Hash_Fetch(ivars->entries, (Obj*)name)) {
+    if (Hash_Fetch(ivars->entries, name)) {
         return true;
     }
     else if (!S_is_local_entry(name)) {
@@ -163,7 +163,7 @@ FSFolder_Local_Is_Directory_IMP(FSFolder *self, String *name) {
     FSFolderIVARS *const ivars = FSFolder_IVARS(self);
 
     // Check for a cached object, then fall back to a system call.
-    Obj *elem = Hash_Fetch(ivars->entries, (Obj*)name);
+    Obj *elem = Hash_Fetch(ivars->entries, name);
     if (elem && Obj_Is_A(elem, FOLDER)) {
         return true;
     }
@@ -210,7 +210,7 @@ FSFolder_Local_Delete_IMP(FSFolder *self, String *name) {
 #else
     bool result = !rmdir(path_ptr) || !remove(path_ptr);
 #endif
-    DECREF(Hash_Delete(ivars->entries, (Obj*)name));
+    DECREF(Hash_Delete(ivars->entries, name));
     FREEMEM(path_ptr);
     return result;
 }
@@ -237,7 +237,7 @@ FSFolder_Local_Find_Folder_IMP(FSFolder *self, String *name) {
         // Don't allow access outside of the main dir.
         return NULL;
     }
-    else if (NULL != (subfolder = (Folder*)Hash_Fetch(ivars->entries, (Obj*)name))) {
+    else if (NULL != (subfolder = (Folder*)Hash_Fetch(ivars->entries, name))) {
         if (Folder_Is_A(subfolder, FOLDER)) {
             return subfolder;
         }
@@ -263,7 +263,7 @@ FSFolder_Local_Find_Folder_IMP(FSFolder *self, String *name) {
                 subfolder = (Folder*)cf_reader;
             }
         }
-        Hash_Store(ivars->entries, (Obj*)name, (Obj*)subfolder);
+        Hash_Store(ivars->entries, name, (Obj*)subfolder);
     }
     DECREF(fullpath);
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Store/Folder.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/Folder.c b/core/Lucy/Store/Folder.c
index 7ec92b6..4f639e0 100644
--- a/core/Lucy/Store/Folder.c
+++ b/core/Lucy/Store/Folder.c
@@ -425,7 +425,7 @@ Folder_Consolidate_IMP(Folder *self, String *path) {
             if (!cf_reader) { RETHROW(INCREF(Err_get_error())); }
             Hash *entries = Folder_IVARS(enclosing_folder)->entries;
             String *name = IxFileNames_local_part(path);
-            Hash_Store(entries, (Obj*)name, (Obj*)cf_reader);
+            Hash_Store(entries, name, (Obj*)cf_reader);
             DECREF(name);
         }
     }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Store/RAMFolder.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/RAMFolder.c b/core/Lucy/Store/RAMFolder.c
index cd1c981..c6ca285 100644
--- a/core/Lucy/Store/RAMFolder.c
+++ b/core/Lucy/Store/RAMFolder.c
@@ -56,15 +56,14 @@ RAMFolder_Check_IMP(RAMFolder *self) {
 bool
 RAMFolder_Local_MkDir_IMP(RAMFolder *self, String *name) {
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
-    if (Hash_Fetch(ivars->entries, (Obj*)name)) {
+    if (Hash_Fetch(ivars->entries, name)) {
         Err_set_error(Err_new(Str_newf("Can't MkDir, '%o' already exists",
                                        name)));
         return false;
     }
     else {
         String *fullpath = S_fullpath(self, name);
-        Hash_Store(ivars->entries, (Obj*)name,
-                   (Obj*)RAMFolder_new(fullpath));
+        Hash_Store(ivars->entries, name, (Obj*)RAMFolder_new(fullpath));
         DECREF(fullpath);
         return true;
     }
@@ -76,7 +75,7 @@ RAMFolder_Local_Open_FileHandle_IMP(RAMFolder *self, String *name,
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
     RAMFileHandle *fh;
     String *fullpath = S_fullpath(self, name);
-    RAMFile *file = (RAMFile*)Hash_Fetch(ivars->entries, (Obj*)name);
+    RAMFile *file = (RAMFile*)Hash_Fetch(ivars->entries, name);
     bool can_create
         = (flags & (FH_WRITE_ONLY | FH_CREATE)) == (FH_WRITE_ONLY | FH_CREATE)
           ? true : false;
@@ -101,7 +100,7 @@ RAMFolder_Local_Open_FileHandle_IMP(RAMFolder *self, String *name,
     if (fh) {
         if (!file) {
             file = RAMFH_Get_File(fh);
-            Hash_Store(ivars->entries, (Obj*)name, INCREF(file));
+            Hash_Store(ivars->entries, name, INCREF(file));
         }
     }
     else {
@@ -124,13 +123,13 @@ RAMFolder_Local_Open_Dir_IMP(RAMFolder *self) {
 bool
 RAMFolder_Local_Exists_IMP(RAMFolder *self, String *name) {
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
-    return !!Hash_Fetch(ivars->entries, (Obj*)name);
+    return !!Hash_Fetch(ivars->entries, name);
 }
 
 bool
 RAMFolder_Local_Is_Directory_IMP(RAMFolder *self, String *name) {
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
-    Obj *entry = Hash_Fetch(ivars->entries, (Obj*)name);
+    Obj *entry = Hash_Fetch(ivars->entries, name);
     if (entry && Obj_Is_A(entry, FOLDER)) { return true; }
     return false;
 }
@@ -186,8 +185,7 @@ S_rename_or_hard_link(RAMFolder *self, String* from, String *to,
     }
 
     // Find the original element.
-    elem = Hash_Fetch(RAMFolder_IVARS(inner_from_folder)->entries,
-                      (Obj*)from_name);
+    elem = Hash_Fetch(RAMFolder_IVARS(inner_from_folder)->entries, from_name);
     if (!elem) {
         if (Folder_Is_A(from_folder, COMPOUNDFILEREADER)
             && Folder_Local_Exists(from_folder, from_name)
@@ -204,7 +202,7 @@ S_rename_or_hard_link(RAMFolder *self, String* from, String *to,
     // Execute the rename/hard-link.
     if (op == OP_RENAME) {
         Obj *existing = Hash_Fetch(RAMFolder_IVARS(inner_to_folder)->entries,
-                                   (Obj*)to_name);
+                                   to_name);
         if (existing) {
             bool conflict = false;
 
@@ -237,9 +235,9 @@ S_rename_or_hard_link(RAMFolder *self, String* from, String *to,
         // Perform the store first, then the delete. Inform Folder objects
         // about the relocation.
         Hash_Store(RAMFolder_IVARS(inner_to_folder)->entries,
-                   (Obj*)to_name, INCREF(elem));
+                   to_name, INCREF(elem));
         DECREF(Hash_Delete(RAMFolder_IVARS(inner_from_folder)->entries,
-                           (Obj*)from_name));
+                           from_name));
         if (Obj_Is_A(elem, FOLDER)) {
             String *newpath = S_fullpath(inner_to_folder, to_name);
             Folder_Set_Path((Folder*)elem, newpath);
@@ -255,14 +253,14 @@ S_rename_or_hard_link(RAMFolder *self, String* from, String *to,
         else {
             Obj *existing
                 = Hash_Fetch(RAMFolder_IVARS(inner_to_folder)->entries,
-                             (Obj*)to_name);
+                             to_name);
             if (existing) {
                 Err_set_error(Err_new(Str_newf("'%o' already exists", to)));
                 return false;
             }
             else {
                 Hash_Store(RAMFolder_IVARS(inner_to_folder)->entries,
-                           (Obj*)to_name, INCREF(elem));
+                           to_name, INCREF(elem));
             }
         }
     }
@@ -306,7 +304,7 @@ RAMFolder_Hard_Link_IMP(RAMFolder *self, String *from,
 bool
 RAMFolder_Local_Delete_IMP(RAMFolder *self, String *name) {
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
-    Obj *entry = Hash_Fetch(ivars->entries, (Obj*)name);
+    Obj *entry = Hash_Fetch(ivars->entries, name);
     if (entry) {
         if (Obj_Is_A(entry, RAMFILE)) {
             ;
@@ -329,7 +327,7 @@ RAMFolder_Local_Delete_IMP(RAMFolder *self, String *name) {
         else {
             return false;
         }
-        DECREF(Hash_Delete(ivars->entries, (Obj*)name));
+        DECREF(Hash_Delete(ivars->entries, name));
         return true;
     }
     else {
@@ -340,7 +338,7 @@ RAMFolder_Local_Delete_IMP(RAMFolder *self, String *name) {
 Folder*
 RAMFolder_Local_Find_Folder_IMP(RAMFolder *self, String *path) {
     RAMFolderIVARS *const ivars = RAMFolder_IVARS(self);
-    Folder *local_folder = (Folder*)Hash_Fetch(ivars->entries, (Obj*)path);
+    Folder *local_folder = (Folder*)Hash_Fetch(ivars->entries, path);
     if (local_folder && Folder_Is_A(local_folder, FOLDER)) {
         return local_folder;
     }

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Test/Analysis/TestSnowballStemmer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Analysis/TestSnowballStemmer.c b/core/Lucy/Test/Analysis/TestSnowballStemmer.c
index 0c7c187..d5cf7ce 100644
--- a/core/Lucy/Test/Analysis/TestSnowballStemmer.c
+++ b/core/Lucy/Test/Analysis/TestSnowballStemmer.c
@@ -70,7 +70,7 @@ test_stemming(TestBatchRunner *runner) {
     String *iso;
     Hash *lang_data;
     Hash_Iterate(tests);
-    while (Hash_Next(tests, (Obj**)&iso, (Obj**)&lang_data)) {
+    while (Hash_Next(tests, &iso, (Obj**)&lang_data)) {
         VArray *words = (VArray*)Hash_Fetch_Utf8(lang_data, "words", 5);
         VArray *stems = (VArray*)Hash_Fetch_Utf8(lang_data, "stems", 5);
         SnowballStemmer *stemmer = SnowStemmer_new(iso);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Test/Store/TestCompoundFileWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Store/TestCompoundFileWriter.c b/core/Lucy/Test/Store/TestCompoundFileWriter.c
index 8c139a9..2cce467 100644
--- a/core/Lucy/Test/Store/TestCompoundFileWriter.c
+++ b/core/Lucy/Test/Store/TestCompoundFileWriter.c
@@ -125,7 +125,7 @@ test_offsets(TestBatchRunner *runner) {
     TEST_TRUE(runner, Hash_Get_Size(files) > 0, "Multiple files");
 
     Hash_Iterate(files);
-    while (Hash_Next(files, (Obj**)&file, &filestats)) {
+    while (Hash_Next(files, &file, &filestats)) {
         Hash *stats = (Hash*)CERTIFY(filestats, HASH);
         Obj *offset = CERTIFY(Hash_Fetch_Utf8(stats, "offset", 6), OBJ);
         int64_t offs = Obj_To_I64(offset);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Test/Util/TestFreezer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Util/TestFreezer.c b/core/Lucy/Test/Util/TestFreezer.c
index 9c0df3b..67f10d5 100644
--- a/core/Lucy/Test/Util/TestFreezer.c
+++ b/core/Lucy/Test/Util/TestFreezer.c
@@ -93,8 +93,7 @@ test_hash(TestBatchRunner *runner) {
     for (uint32_t i = 0; i < 10; i++) {
         String *str = TestUtils_random_string(rand() % 1200);
         Integer32 *num = Int32_new(i);
-        Hash_Store(wanted, (Obj*)str, (Obj*)num);
-        Hash_Store(wanted, (Obj*)num, (Obj*)str);
+        Hash_Store(wanted, str, (Obj*)num);
     }
 
     {

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Test/Util/TestJson.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Util/TestJson.c b/core/Lucy/Test/Util/TestJson.c
index 10c6f7d..b978cf4 100644
--- a/core/Lucy/Test/Util/TestJson.c
+++ b/core/Lucy/Test/Util/TestJson.c
@@ -337,24 +337,9 @@ test_max_depth(TestBatchRunner *runner) {
     DECREF(circular);
 }
 
-static void
-test_illegal_keys(TestBatchRunner *runner) {
-    Hash *hash = Hash_new(0);
-    Float64 *key = Float64_new(1.1);
-    Hash_Store(hash, (Obj*)key, (Obj*)Str_newf("blah"));
-    Err_set_error(NULL);
-    String *not_json = Json_to_json((Obj*)hash);
-    TEST_TRUE(runner, not_json == NULL,
-              "to_json returns NULL when fed an illegal key");
-    TEST_TRUE(runner, Err_get_error() != NULL,
-              "to_json sets global error when fed an illegal key");
-    DECREF(key);
-    DECREF(hash);
-}
-
 void
 TestJson_Run_IMP(TestJson *self, TestBatchRunner *runner) {
-    uint32_t num_tests = 107;
+    uint32_t num_tests = 105;
 #ifndef LUCY_VALGRIND
     num_tests += 28; // FIXME: syntax errors leak memory.
 #endif
@@ -371,7 +356,6 @@ TestJson_Run_IMP(TestJson *self, TestBatchRunner *runner) {
     test_integers(runner);
     test_floats(runner);
     test_max_depth(runner);
-    test_illegal_keys(runner);
 
 #ifndef LUCY_VALGRIND
     test_syntax_errors(runner);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Util/Freezer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Util/Freezer.c b/core/Lucy/Util/Freezer.c
index b491d59..dce1884 100644
--- a/core/Lucy/Util/Freezer.c
+++ b/core/Lucy/Util/Freezer.c
@@ -288,64 +288,34 @@ Freezer_read_varray(InStream *instream) {
 
 void
 Freezer_serialize_hash(Hash *hash, OutStream *outstream) {
-    Obj *key;
-    Obj *val;
-    uint32_t string_count = 0;
+    String *key;
+    Obj    *val;
     uint32_t hash_size = Hash_Get_Size(hash);
     OutStream_Write_C32(outstream, hash_size);
 
-    // Write String keys first.  String keys are the common case; grouping
-    // them together is a form of run-length-encoding and saves space, since
-    // we omit the per-key class name.
     Hash_Iterate(hash);
     while (Hash_Next(hash, &key, &val)) {
-        if (Obj_Is_A(key, STRING)) { string_count++; }
-    }
-    OutStream_Write_C32(outstream, string_count);
-    Hash_Iterate(hash);
-    while (Hash_Next(hash, &key, &val)) {
-        if (Obj_Is_A(key, STRING)) {
-            Freezer_serialize_string((String*)key, outstream);
-            FREEZE(val, outstream);
-        }
-    }
-
-    // Punt on the classes of the remaining keys.
-    Hash_Iterate(hash);
-    while (Hash_Next(hash, &key, &val)) {
-        if (!Obj_Is_A(key, STRING)) {
-            FREEZE(key, outstream);
-            FREEZE(val, outstream);
-        }
+        Freezer_serialize_string(key, outstream);
+        FREEZE(val, outstream);
     }
 }
 
 Hash*
 Freezer_deserialize_hash(Hash *hash, InStream *instream) {
-    uint32_t size        = InStream_Read_C32(instream);
-    uint32_t num_strings = InStream_Read_C32(instream);
-    uint32_t num_other   = size - num_strings;
+    uint32_t size = InStream_Read_C32(instream);
 
     Hash_init(hash, size);
 
-    // Read key-value pairs with String keys.
-    while (num_strings--) {
+    while (size--) {
         uint32_t len = InStream_Read_C32(instream);
         char *key_buf = (char*)MALLOCATE(len + 1);
         InStream_Read_Bytes(instream, key_buf, len);
         key_buf[len] = '\0';
         String *key = Str_new_steal_utf8(key_buf, len);
-        Hash_Store(hash, (Obj*)key, THAW(instream));
+        Hash_Store(hash, key, THAW(instream));
         DECREF(key);
     }
 
-    // Read remaining key/value pairs.
-    while (num_other--) {
-        Obj *k = THAW(instream);
-        Hash_Store(hash, k, THAW(instream));
-        DECREF(k);
-    }
-
     return hash;
 }
 
@@ -369,15 +339,12 @@ S_dump_array(VArray *array) {
 
 Obj*
 S_dump_hash(Hash *hash) {
-    Hash *dump = Hash_new(Hash_Get_Size(hash));
-    Obj *key;
-    Obj *value;
+    Hash   *dump = Hash_new(Hash_Get_Size(hash));
+    String *key;
+    Obj    *value;
 
     Hash_Iterate(hash);
     while (Hash_Next(hash, &key, &value)) {
-        // Since JSON only supports text hash keys, dump() can only support
-        // text hash keys.
-        CERTIFY(key, STRING);
         Hash_Store(dump, key, Freezer_dump(value));
     }
 
@@ -480,9 +447,9 @@ S_load_from_hash(Hash *dump) {
     }
 
     // It's an ordinary Hash.
-    Hash *loaded = Hash_new(Hash_Get_Size(dump));
-    Obj *key;
-    Obj *value;
+    Hash   *loaded = Hash_new(Hash_Get_Size(dump));
+    String *key;
+    Obj    *value;
     Hash_Iterate(dump);
     while (Hash_Next(dump, &key, &value)) {
         Hash_Store(loaded, key, Freezer_load(value));
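
With keys restricted to strings, the Freezer no longer partitions a hash into a string-keyed run followed by FREEZE'd keys of arbitrary class; the serialized form is simply the pair count followed by one length-prefixed UTF-8 key and one frozen value per entry. A rough round-trip sketch under the new layout (stream setup and flushing omitted; `outstream` and `instream` are assumed to be an open Lucy OutStream/InStream over the same storage):

    Hash *h = Hash_new(0);
    Hash_Store_Utf8(h, "title", 5, (Obj*)Str_newf("Moby-Dick"));
    Freezer_serialize_hash(h, outstream);   // pair count, then key/value pairs
    Hash *thawed = Freezer_deserialize_hash(Hash_new(0), instream);
    // `thawed` now holds the same string-keyed entry as `h`.
    DECREF(thawed);
    DECREF(h);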

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Util/Json.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Util/Json.c b/core/Lucy/Util/Json.c
index 92a3e2e..d7d9c8a 100644
--- a/core/Lucy/Util/Json.c
+++ b/core/Lucy/Util/Json.c
@@ -350,10 +350,10 @@ S_to_json(Obj *dump, CharBuf *buf, int32_t depth) {
         // Spread pairs across multiple lines.
         CB_Cat_Trusted_Utf8(buf, "{", 1);
         for (size_t i = 0; i < size; i++) {
-            Obj *key = VA_Fetch(keys, i);
+            String *key = (String*)VA_Fetch(keys, i);
             CB_Cat_Trusted_Utf8(buf, "\n", 1);
             S_cat_whitespace(buf, depth + 1);
-            S_append_json_string((String*)key, buf);
+            S_append_json_string(key, buf);
             CB_Cat_Trusted_Utf8(buf, ": ", 2);
             if (!S_to_json(Hash_Fetch(hash, key), buf, depth + 1)) {
                 DECREF(keys);

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/core/Lucy/Util/Json/JsonParser.y
----------------------------------------------------------------------
diff --git a/core/Lucy/Util/Json/JsonParser.y b/core/Lucy/Util/Json/JsonParser.y
index 3e1b5c9..bdbfa1e 100644
--- a/core/Lucy/Util/Json/JsonParser.y
+++ b/core/Lucy/Util/Json/JsonParser.y
@@ -85,28 +85,28 @@ empty_object(A) ::= LEFT_CURLY_BRACKET RIGHT_CURLY_BRACKET.
 single_pair_object(A) ::= LEFT_CURLY_BRACKET STRING(B) COLON value(C) RIGHT_CURLY_BRACKET.
 {
     A = cfish_Hash_new(1);
-    CFISH_Hash_Store(A, (cfish_Obj*)B, C);
+    CFISH_Hash_Store(A, (cfish_String*)B, C);
     CFISH_DECREF(B);
 }
 
 multi_pair_object(A) ::= LEFT_CURLY_BRACKET key_value_pair_list(B) STRING(C) COLON value(D) RIGHT_CURLY_BRACKET.
 {
     A = B;
-    CFISH_Hash_Store(A, (cfish_Obj*)C, D);
+    CFISH_Hash_Store(A, (cfish_String*)C, D);
     CFISH_DECREF(C);
 }
 
 key_value_pair_list(A) ::= key_value_pair_list(B) STRING(C) COLON value(D) COMMA.
 { 
     A = B; 
-    CFISH_Hash_Store(A, (cfish_Obj*)C, D);
+    CFISH_Hash_Store(A, (cfish_String*)C, D);
     CFISH_DECREF(C);
 }
 
 key_value_pair_list(A) ::= STRING(B) COLON value(C) COMMA.
 {
     A = cfish_Hash_new(0);
-    CFISH_Hash_Store(A, (cfish_Obj*)B, C);
+    CFISH_Hash_Store(A, (cfish_String*)B, C);
     CFISH_DECREF(B);
 }
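
The grammar actions only change the cast on the key argument: CFISH_Hash_Store now takes its key as a cfish_String* rather than a cfish_Obj*. The assumed C-binding prototype after this patch looks roughly like this (a sketch, not copied from the generated header):

    void
    CFISH_Hash_Store(cfish_Hash *self, cfish_String *key, cfish_Obj *value);

As the actions above suggest, Store consumes a refcount on the value while the hash keeps its own reference to the key, which is why each action DECREFs the key token but not the value.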
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/8e232df5/go/lucy/index.go
----------------------------------------------------------------------
diff --git a/go/lucy/index.go b/go/lucy/index.go
index 338dffa..a562e5a 100644
--- a/go/lucy/index.go
+++ b/go/lucy/index.go
@@ -140,7 +140,7 @@ func (obj *implIndexer) AddDoc(doc interface{}) error {
 		fieldC := obj.findFieldC(field)
 		valueC := clownfish.NewString(value)
 		C.CFISH_Hash_Store(docFields,
-			(*C.cfish_Obj)(unsafe.Pointer(fieldC)),
+			(*C.cfish_String)(unsafe.Pointer(fieldC)),
 			C.cfish_inc_refcount(unsafe.Pointer(valueC.TOPTR())))
 	}
 

