Iter var change to match Vec_Get_Size.

Change the type of iteration variables to `size_t` to match the return
type of Vec_Get_Size.  This commit is restricted to simple cases where
the iteration variable is scoped to the loop block and is only used as
a C array index or as an argument to Vector methods which take a `size_t`.


Project: http://git-wip-us.apache.org/repos/asf/lucy/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucy/commit/e2235fef
Tree: http://git-wip-us.apache.org/repos/asf/lucy/tree/e2235fef
Diff: http://git-wip-us.apache.org/repos/asf/lucy/diff/e2235fef

Branch: refs/heads/master
Commit: e2235fef95d2a1253ccba8405518e67a3445a99e
Parents: 8193b73
Author: Marvin Humphrey <mar...@rectangular.com>
Authored: Tue Mar 29 16:12:17 2016 -0700
Committer: Marvin Humphrey <mar...@rectangular.com>
Committed: Tue Mar 29 21:04:54 2016 -0700

----------------------------------------------------------------------
 core/Lucy/Analysis/PolyAnalyzer.c             |  4 +--
 core/Lucy/Highlight/Highlighter.c             |  4 +--
 core/Lucy/Index/BackgroundMerger.c            |  8 +++---
 core/Lucy/Index/DeletionsReader.c             |  4 +--
 core/Lucy/Index/DeletionsWriter.c             | 12 ++++----
 core/Lucy/Index/DocReader.c                   |  4 +--
 core/Lucy/Index/FilePurger.c                  | 14 +++++-----
 core/Lucy/Index/HighlightReader.c             |  4 +--
 core/Lucy/Index/IndexManager.c                |  2 +-
 core/Lucy/Index/Indexer.c                     | 12 ++++----
 core/Lucy/Index/Inverter.c                    |  2 +-
 core/Lucy/Index/LexiconReader.c               |  6 ++--
 core/Lucy/Index/PolyLexicon.c                 |  2 +-
 core/Lucy/Index/PolyReader.c                  |  8 +++---
 core/Lucy/Index/PostingListWriter.c           |  8 +++---
 core/Lucy/Index/SegWriter.c                   | 10 +++----
 core/Lucy/Index/Snapshot.c                    |  4 +--
 core/Lucy/Index/SortWriter.c                  |  6 ++--
 core/Lucy/Plan/Schema.c                       |  2 +-
 core/Lucy/Search/PhraseQuery.c                |  4 +--
 core/Lucy/Search/PolyQuery.c                  |  6 ++--
 core/Lucy/Search/QueryParser.c                | 32 +++++++++++-----------
 core/Lucy/Search/SortSpec.c                   |  2 +-
 core/Lucy/Store/CompoundFileWriter.c          |  4 +--
 core/Lucy/Store/Folder.c                      |  4 +--
 core/Lucy/Test/Analysis/TestNormalizer.c      |  4 +--
 core/Lucy/Test/Analysis/TestSnowballStemmer.c |  2 +-
 core/Lucy/Test/Index/TestSortWriter.c         |  2 +-
 core/Lucy/Test/Search/TestQueryParserSyntax.c |  2 +-
 core/Lucy/Test/Store/TestFSFolder.c           |  2 +-
 core/Lucy/Util/Freezer.c                      |  4 +--
 core/Lucy/Util/SortExternal.c                 |  4 +--
 core/LucyX/Search/ProximityQuery.c            |  4 +--
 33 files changed, 96 insertions(+), 96 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Analysis/PolyAnalyzer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Analysis/PolyAnalyzer.c 
b/core/Lucy/Analysis/PolyAnalyzer.c
index ac9960f..ac04bad 100644
--- a/core/Lucy/Analysis/PolyAnalyzer.c
+++ b/core/Lucy/Analysis/PolyAnalyzer.c
@@ -38,7 +38,7 @@ PolyAnalyzer_init(PolyAnalyzer *self, String *language,
     PolyAnalyzerIVARS *const ivars = PolyAnalyzer_IVARS(self);
 
     if (analyzers) {
-        for (uint32_t i = 0, max = Vec_Get_Size(analyzers); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(analyzers); i < max; i++) {
             CERTIFY(Vec_Fetch(analyzers, i), ANALYZER);
         }
         ivars->analyzers = (Vector*)INCREF(analyzers);
@@ -74,7 +74,7 @@ PolyAnalyzer_Transform_IMP(PolyAnalyzer *self, Inversion 
*inversion) {
     (void)INCREF(inversion);
 
     // Iterate through each of the analyzers in order.
-    for (uint32_t i = 0, max = Vec_Get_Size(analyzers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(analyzers); i < max; i++) {
         Analyzer *analyzer = (Analyzer*)Vec_Fetch(analyzers, i);
         Inversion *new_inversion = Analyzer_Transform(analyzer, inversion);
         DECREF(inversion);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Highlight/Highlighter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Highlight/Highlighter.c 
b/core/Lucy/Highlight/Highlighter.c
index 6963823..edc0db5 100644
--- a/core/Lucy/Highlight/Highlighter.c
+++ b/core/Lucy/Highlight/Highlighter.c
@@ -200,7 +200,7 @@ S_hottest(HeatMap *heat_map) {
     float max_score = 0.0f;
     int32_t retval = 0;
     Vector *spans = HeatMap_Get_Spans(heat_map);
-    for (uint32_t i = Vec_Get_Size(spans); i--;) {
+    for (size_t i = Vec_Get_Size(spans); i--;) {
         Span *span = (Span*)Vec_Fetch(spans, i);
         if (Span_Get_Weight(span) >= max_score) {
             retval = Span_Get_Offset(span);
@@ -477,7 +477,7 @@ Highlighter_Highlight_Excerpt_IMP(Highlighter *self, Vector 
*spans,
     CharBuf        *encode_buf      = NULL;
     int32_t         raw_excerpt_end = top + Str_Length(raw_excerpt);
 
-    for (uint32_t i = 0, max = Vec_Get_Size(spans); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(spans); i < max; i++) {
         Span *span = (Span*)Vec_Fetch(spans, i);
         int32_t offset = Span_Get_Offset(span);
         if (offset < top) {

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/BackgroundMerger.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/BackgroundMerger.c 
b/core/Lucy/Index/BackgroundMerger.c
index 5e17007..8e04daa 100644
--- a/core/Lucy/Index/BackgroundMerger.c
+++ b/core/Lucy/Index/BackgroundMerger.c
@@ -127,7 +127,7 @@ BGMerger_init(BackgroundMerger *self, Obj *index, 
IndexManager *manager) {
         = IxManager_Highest_Seg_Num(ivars->manager, ivars->snapshot) + 1;
     Vector *fields = Schema_All_Fields(ivars->schema);
     ivars->segment = Seg_new(new_seg_num);
-    for (uint32_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
         Seg_Add_Field(ivars->segment, (String*)Vec_Fetch(fields, i));
     }
     DECREF(fields);
@@ -261,13 +261,13 @@ S_merge_updated_deletions(BackgroundMerger *self) {
         = PolyReader_Get_Seg_Readers(ivars->polyreader);
     Hash *new_segs = Hash_new(Vec_Get_Size(new_seg_readers));
 
-    for (uint32_t i = 0, max = Vec_Get_Size(new_seg_readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(new_seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(new_seg_readers, i);
         String    *seg_name   = SegReader_Get_Seg_Name(seg_reader);
         Hash_Store(new_segs, seg_name, INCREF(seg_reader));
     }
 
-    for (uint32_t i = 0, max = Vec_Get_Size(old_seg_readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(old_seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(old_seg_readers, i);
         String    *seg_name   = SegReader_Get_Seg_Name(seg_reader);
 
@@ -443,7 +443,7 @@ BGMerger_Prepare_Commit_IMP(BackgroundMerger *self) {
             // run this AFTER S_merge_updated_deletions, because otherwise
             // we couldn't tell whether the deletion counts changed.)
             Vector *files = Snapshot_List(latest_snapshot);
-            for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+            for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
                 String *file = (String*)Vec_Fetch(files, i);
                 if (Str_Starts_With_Utf8(file, "seg_", 4)) {
                     int64_t gen = (int64_t)IxFileNames_extract_gen(file);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/DeletionsReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DeletionsReader.c 
b/core/Lucy/Index/DeletionsReader.c
index c7118c5..2231024 100644
--- a/core/Lucy/Index/DeletionsReader.c
+++ b/core/Lucy/Index/DeletionsReader.c
@@ -61,7 +61,7 @@ PolyDelReader_init(PolyDeletionsReader *self, Vector *readers,
     DelReader_init((DeletionsReader*)self, NULL, NULL, NULL, NULL, -1);
     PolyDeletionsReaderIVARS *const ivars = PolyDelReader_IVARS(self);
     ivars->del_count = 0;
-    for (uint32_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
         DeletionsReader *reader = (DeletionsReader*)CERTIFY(
                                       Vec_Fetch(readers, i), DELETIONSREADER);
         ivars->del_count += DelReader_Del_Count(reader);
@@ -75,7 +75,7 @@ void
 PolyDelReader_Close_IMP(PolyDeletionsReader *self) {
     PolyDeletionsReaderIVARS *const ivars = PolyDelReader_IVARS(self);
     if (ivars->readers) {
-        for (uint32_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) 
{
+        for (size_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
             DeletionsReader *reader
                 = (DeletionsReader*)Vec_Fetch(ivars->readers, i);
             if (reader) { DelReader_Close(reader); }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/DeletionsWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DeletionsWriter.c 
b/core/Lucy/Index/DeletionsWriter.c
index ec36fbf..23de833 100644
--- a/core/Lucy/Index/DeletionsWriter.c
+++ b/core/Lucy/Index/DeletionsWriter.c
@@ -144,7 +144,7 @@ DefDelWriter_Finish_IMP(DefaultDeletionsWriter *self) {
     DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self);
     Folder *const folder = ivars->folder;
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->seg_readers, i);
         if (ivars->updated[i]) {
             BitVector *deldocs   = (BitVector*)Vec_Fetch(ivars->bit_vecs, i);
@@ -181,7 +181,7 @@ DefDelWriter_Metadata_IMP(DefaultDeletionsWriter *self) {
     Hash    *const metadata = super_meta(self);
     Hash    *const files    = Hash_new(0);
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->seg_readers, i);
         if (ivars->updated[i]) {
             BitVector *deldocs   = (BitVector*)Vec_Fetch(ivars->bit_vecs, i);
@@ -250,7 +250,7 @@ void
 DefDelWriter_Delete_By_Term_IMP(DefaultDeletionsWriter *self,
                                 String *field, Obj *term) {
     DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->seg_readers, i);
         PostingListReader *plist_reader
             = (PostingListReader*)SegReader_Fetch(
@@ -280,7 +280,7 @@ DefDelWriter_Delete_By_Query_IMP(DefaultDeletionsWriter 
*self, Query *query) {
     Compiler *compiler = Query_Make_Compiler(query, (Searcher*)ivars->searcher,
                                              Query_Get_Boost(query), false);
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->seg_readers, i);
         BitVector *bit_vec = (BitVector*)Vec_Fetch(ivars->bit_vecs, i);
         Matcher *matcher = Compiler_Make_Matcher(compiler, seg_reader, false);
@@ -320,7 +320,7 @@ DefDelWriter_Delete_By_Doc_ID_IMP(DefaultDeletionsWriter 
*self, int32_t doc_id)
 bool
 DefDelWriter_Updated_IMP(DefaultDeletionsWriter *self) {
     DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->seg_readers); i < max; i++) {
         if (ivars->updated[i]) { return true; }
     }
     return false;
@@ -358,7 +358,7 @@ DefDelWriter_Merge_Segment_IMP(DefaultDeletionsWriter *self,
                  * we're adding correspond to.  If it's gone, we don't
                  * need to worry about losing deletions files that point
                  * at it. */
-                for (uint32_t i = 0, max = Vec_Get_Size(seg_readers); i < max; 
i++) {
+                for (size_t i = 0, max = Vec_Get_Size(seg_readers); i < max; 
i++) {
                     SegReader *candidate
                         = (SegReader*)Vec_Fetch(seg_readers, i);
                     String *candidate_name

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/DocReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/DocReader.c b/core/Lucy/Index/DocReader.c
index 8759074..afec004 100644
--- a/core/Lucy/Index/DocReader.c
+++ b/core/Lucy/Index/DocReader.c
@@ -54,7 +54,7 @@ PolyDocReader*
 PolyDocReader_init(PolyDocReader *self, Vector *readers, I32Array *offsets) {
     DocReader_init((DocReader*)self, NULL, NULL, NULL, NULL, -1);
     PolyDocReaderIVARS *const ivars = PolyDocReader_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
         CERTIFY(Vec_Fetch(readers, i), DOCREADER);
     }
     ivars->readers = (Vector*)INCREF(readers);
@@ -66,7 +66,7 @@ void
 PolyDocReader_Close_IMP(PolyDocReader *self) {
     PolyDocReaderIVARS *const ivars = PolyDocReader_IVARS(self);
     if (ivars->readers) {
-        for (uint32_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) 
{
+        for (size_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
             DocReader *reader = (DocReader*)Vec_Fetch(ivars->readers, i);
             if (reader) { DocReader_Close(reader); }
         }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/FilePurger.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/FilePurger.c b/core/Lucy/Index/FilePurger.c
index c02dee8..0278434 100644
--- a/core/Lucy/Index/FilePurger.c
+++ b/core/Lucy/Index/FilePurger.c
@@ -96,7 +96,7 @@ FilePurger_Purge_IMP(FilePurger *self) {
         // again later.  Proceed in reverse lexical order so that directories
         // get deleted after they've been emptied.
         Vec_Sort(purgables);
-        for (uint32_t i = Vec_Get_Size(purgables); i--;) {
+        for (size_t i = Vec_Get_Size(purgables); i--;) {
             String *entry = (String*)Vec_Fetch(purgables, i);
             if (Hash_Fetch(ivars->disallowed, entry)) { continue; }
             if (!Folder_Delete(folder, entry)) {
@@ -106,14 +106,14 @@ FilePurger_Purge_IMP(FilePurger *self) {
             }
         }
 
-        for (uint32_t i = 0, max = Vec_Get_Size(snapshots); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(snapshots); i < max; i++) {
             Snapshot *snapshot = (Snapshot*)Vec_Fetch(snapshots, i);
             bool snapshot_has_failures = false;
             if (Hash_Get_Size(failures)) {
                 // Only delete snapshot files if all of their entries were
                 // successfully deleted.
                 Vector *entries = Snapshot_List(snapshot);
-                for (uint32_t j = Vec_Get_Size(entries); j--;) {
+                for (size_t j = Vec_Get_Size(entries); j--;) {
                     String *entry = (String*)Vec_Fetch(entries, j);
                     if (Hash_Fetch(failures, entry)) {
                         snapshot_has_failures = true;
@@ -240,7 +240,7 @@ S_discover_unused(FilePurger *self, Vector **purgables_ptr,
             else {
                 // No one's using this snapshot, so all of its entries are
                 // candidates for deletion.
-                for (uint32_t i = 0, max = Vec_Get_Size(referenced); i < max; 
i++) {
+                for (size_t i = 0, max = Vec_Get_Size(referenced); i < max; 
i++) {
                     String *file = (String*)Vec_Fetch(referenced, i);
                     Hash_Store(candidates, file, (Obj*)CFISH_TRUE);
                 }
@@ -260,7 +260,7 @@ S_discover_unused(FilePurger *self, Vector **purgables_ptr,
     S_zap_dead_merge(self, candidates);
 
     // Eliminate any current files from the list of files to be purged.
-    for (uint32_t i = 0, max = Vec_Get_Size(spared); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(spared); i < max; i++) {
         String *filename = (String*)Vec_Fetch(spared, i);
         DECREF(Hash_Delete(candidates, filename));
     }
@@ -276,12 +276,12 @@ S_discover_unused(FilePurger *self, Vector 
**purgables_ptr,
 static Vector*
 S_find_all_referenced(Folder *folder, Vector *entries) {
     Hash *uniqued = Hash_new(Vec_Get_Size(entries));
-    for (uint32_t i = 0, max = Vec_Get_Size(entries); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(entries); i < max; i++) {
         String *entry = (String*)Vec_Fetch(entries, i);
         Hash_Store(uniqued, entry, (Obj*)CFISH_TRUE);
         if (Folder_Is_Directory(folder, entry)) {
             Vector *contents = Folder_List_R(folder, entry);
-            for (uint32_t j = Vec_Get_Size(contents); j--;) {
+            for (size_t j = Vec_Get_Size(contents); j--;) {
                 String *sub_entry = (String*)Vec_Fetch(contents, j);
                 Hash_Store(uniqued, sub_entry, (Obj*)CFISH_TRUE);
             }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/HighlightReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/HighlightReader.c 
b/core/Lucy/Index/HighlightReader.c
index 0d443c3..2afaf66 100644
--- a/core/Lucy/Index/HighlightReader.c
+++ b/core/Lucy/Index/HighlightReader.c
@@ -61,7 +61,7 @@ PolyHLReader_init(PolyHighlightReader *self, Vector *readers,
                   I32Array *offsets) {
     HLReader_init((HighlightReader*)self, NULL, NULL, NULL, NULL, -1);
     PolyHighlightReaderIVARS *const ivars = PolyHLReader_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
         CERTIFY(Vec_Fetch(readers, i), HIGHLIGHTREADER);
     }
     ivars->readers = (Vector*)INCREF(readers);
@@ -73,7 +73,7 @@ void
 PolyHLReader_Close_IMP(PolyHighlightReader *self) {
     PolyHighlightReaderIVARS *const ivars = PolyHLReader_IVARS(self);
     if (ivars->readers) {
-        for (uint32_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) 
{
+        for (size_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
             HighlightReader *sub_reader
                 = (HighlightReader*)Vec_Fetch(ivars->readers, i);
             if (sub_reader) { HLReader_Close(sub_reader); }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/IndexManager.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/IndexManager.c b/core/Lucy/Index/IndexManager.c
index 7823499..af6205f 100644
--- a/core/Lucy/Index/IndexManager.c
+++ b/core/Lucy/Index/IndexManager.c
@@ -72,7 +72,7 @@ IxManager_Highest_Seg_Num_IMP(IndexManager *self, Snapshot 
*snapshot) {
     Vector *files = Snapshot_List(snapshot);
     uint64_t highest_seg_num = 0;
     UNUSED_VAR(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
         String *file = (String*)Vec_Fetch(files, i);
         if (Seg_valid_seg_name(file)) {
             uint64_t seg_num = IxFileNames_extract_gen(file);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/Indexer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Indexer.c b/core/Lucy/Index/Indexer.c
index 103098f..1cf4df5 100644
--- a/core/Lucy/Index/Indexer.c
+++ b/core/Lucy/Index/Indexer.c
@@ -196,7 +196,7 @@ Indexer_init(Indexer *self, Schema *schema, Obj *index,
 
     // Add all known fields to Segment.
     Vector *fields = Schema_All_Fields(schema);
-    for (uint32_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
         Seg_Add_Field(ivars->segment, (String*)Vec_Fetch(fields, i));
     }
     DECREF(fields);
@@ -344,14 +344,14 @@ Indexer_Add_Index_IMP(Indexer *self, Obj *index) {
         Schema_Eat(schema, other_schema);
 
         // Add fields to Segment.
-        for (uint32_t i = 0, max = Vec_Get_Size(other_fields); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(other_fields); i < max; i++) {
             String *other_field = (String*)Vec_Fetch(other_fields, i);
             Seg_Add_Field(ivars->segment, other_field);
         }
         DECREF(other_fields);
 
         // Add all segments.
-        for (uint32_t i = 0, max = Vec_Get_Size(seg_readers); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(seg_readers); i < max; i++) {
             SegReader *seg_reader = (SegReader*)Vec_Fetch(seg_readers, i);
             DeletionsReader *del_reader
                 = (DeletionsReader*)SegReader_Fetch(
@@ -383,7 +383,7 @@ static String*
 S_find_schema_file(Snapshot *snapshot) {
     Vector *files = Snapshot_List(snapshot);
     String *retval = NULL;
-    for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
         String *file = (String*)Vec_Fetch(files, i);
         if (Str_Starts_With_Utf8(file, "schema_", 7)
             && Str_Ends_With_Utf8(file, ".json", 5)
@@ -434,7 +434,7 @@ S_maybe_merge(Indexer *self, Vector *seg_readers) {
                                          ivars->del_writer, cutoff, 
ivars->optimize);
 
     Hash *seen = Hash_new(Vec_Get_Size(to_merge));
-    for (uint32_t i = 0, max = Vec_Get_Size(to_merge); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(to_merge); i < max; i++) {
         SegReader *seg_reader
             = (SegReader*)CERTIFY(Vec_Fetch(to_merge, i), SEGREADER);
         String *seg_name = SegReader_Get_Seg_Name(seg_reader);
@@ -449,7 +449,7 @@ S_maybe_merge(Indexer *self, Vector *seg_readers) {
     DECREF(seen);
 
     // Consolidate segments if either sparse or optimizing forced.
-    for (uint32_t i = 0, max = Vec_Get_Size(to_merge); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(to_merge); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(to_merge, i);
         int64_t seg_num = SegReader_Get_Seg_Num(seg_reader);
         Matcher *deletions

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/Inverter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Inverter.c b/core/Lucy/Index/Inverter.c
index c6d1873..245b937 100644
--- a/core/Lucy/Index/Inverter.c
+++ b/core/Lucy/Index/Inverter.c
@@ -182,7 +182,7 @@ Inverter_Add_Field_IMP(Inverter *self, InverterEntry 
*entry) {
 void
 Inverter_Clear_IMP(Inverter *self) {
     InverterIVARS *const ivars = Inverter_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->entries); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->entries); i < max; i++) {
         InvEntry_Clear((InverterEntry*)Vec_Fetch(ivars->entries, i));
     }
     Vec_Clear(ivars->entries);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/LexiconReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconReader.c b/core/Lucy/Index/LexiconReader.c
index 7450222..865da34 100644
--- a/core/Lucy/Index/LexiconReader.c
+++ b/core/Lucy/Index/LexiconReader.c
@@ -56,7 +56,7 @@ PolyLexiconReader*
 PolyLexReader_init(PolyLexiconReader *self, Vector *readers,
                    I32Array *offsets) {
     Schema *schema = NULL;
-    for (uint32_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(readers); i < max; i++) {
         LexiconReader *reader
             = (LexiconReader*)CERTIFY(Vec_Fetch(readers, i), LEXICONREADER);
         if (!schema) { schema = LexReader_Get_Schema(reader); }
@@ -72,7 +72,7 @@ void
 PolyLexReader_Close_IMP(PolyLexiconReader *self) {
     PolyLexiconReaderIVARS *const ivars = PolyLexReader_IVARS(self);
     if (ivars->readers) {
-        for (uint32_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) 
{
+        for (size_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
             LexiconReader *reader
                 = (LexiconReader*)Vec_Fetch(ivars->readers, i);
             if (reader) { LexReader_Close(reader); }
@@ -116,7 +116,7 @@ PolyLexReader_Doc_Freq_IMP(PolyLexiconReader *self, String 
*field,
                            Obj *term) {
     PolyLexiconReaderIVARS *const ivars = PolyLexReader_IVARS(self);
     uint32_t doc_freq = 0;
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->readers); i < max; i++) {
         LexiconReader *reader = (LexiconReader*)Vec_Fetch(ivars->readers, i);
         if (reader) {
             doc_freq += LexReader_Doc_Freq(reader, field, term);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/PolyLexicon.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyLexicon.c b/core/Lucy/Index/PolyLexicon.c
index 61e1976..21bd73e 100644
--- a/core/Lucy/Index/PolyLexicon.c
+++ b/core/Lucy/Index/PolyLexicon.c
@@ -81,7 +81,7 @@ S_refresh_lex_q(SegLexQueue *lex_q, Vector *seg_lexicons, Obj 
*target) {
     }
 
     // Refill the queue.
-    for (uint32_t i = 0, max = Vec_Get_Size(seg_lexicons); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(seg_lexicons); i < max; i++) {
         SegLexicon *const seg_lexicon
             = (SegLexicon*)Vec_Fetch(seg_lexicons, i);
         SegLex_Seek(seg_lexicon, target);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/PolyReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyReader.c b/core/Lucy/Index/PolyReader.c
index b20409a..7cd8a22 100644
--- a/core/Lucy/Index/PolyReader.c
+++ b/core/Lucy/Index/PolyReader.c
@@ -93,7 +93,7 @@ PolyReader_open(Obj *index, Snapshot *snapshot, IndexManager 
*manager) {
 
 static Obj*
 S_first_non_null(Vector *array) {
-    for (uint32_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
         Obj *thing = Vec_Fetch(array, i);
         if (thing) { return thing; }
     }
@@ -191,7 +191,7 @@ PolyReader_Close_IMP(PolyReader *self) {
     PolyReaderIVARS *const ivars = PolyReader_IVARS(self);
     PolyReader_Close_t super_close
         = SUPER_METHOD_PTR(POLYREADER, LUCY_PolyReader_Close);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->sub_readers); i < max; i++) 
{
+    for (size_t i = 0, max = Vec_Get_Size(ivars->sub_readers); i < max; i++) {
         SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->sub_readers, i);
         SegReader_Close(seg_reader);
     }
@@ -234,7 +234,7 @@ S_try_open_elements(void *context) {
     String     *schema_file       = NULL;
 
     // Find schema file, count segments.
-    for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
         String *entry = (String*)Vec_Fetch(files, i);
 
         if (Seg_valid_seg_name(entry)) {
@@ -272,7 +272,7 @@ S_try_open_elements(void *context) {
     }
 
     Vector *segments = Vec_new(num_segs);
-    for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
         String *entry = (String*)Vec_Fetch(files, i);
 
         // Create a Segment for each segmeta.

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/PostingListWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingListWriter.c 
b/core/Lucy/Index/PostingListWriter.c
index 3c6e6aa..68c10f5 100644
--- a/core/Lucy/Index/PostingListWriter.c
+++ b/core/Lucy/Index/PostingListWriter.c
@@ -167,7 +167,7 @@ PListWriter_Add_Inverted_Doc_IMP(PostingListWriter *self, 
Inverter *inverter,
     // flush all of them, then release all the RawPostings with a single
     // action.
     if (MemPool_Get_Consumed(ivars->mem_pool) > ivars->mem_thresh) {
-        for (uint32_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
             PostingPool *const pool = (PostingPool*)Vec_Fetch(ivars->pools, i);
             if (pool) { PostPool_Flush(pool); }
         }
@@ -185,7 +185,7 @@ PListWriter_Add_Segment_IMP(PostingListWriter *self, 
SegReader *reader,
     Vector  *all_fields    = Schema_All_Fields(schema);
     S_lazy_init(self);
 
-    for (uint32_t i = 0, max = Vec_Get_Size(all_fields); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(all_fields); i < max; i++) {
         String    *field = (String*)Vec_Fetch(all_fields, i);
         FieldType *type  = Schema_Fetch_Type(schema, field);
         int32_t old_field_num = Seg_Field_Num(other_segment, field);
@@ -223,13 +223,13 @@ PListWriter_Finish_IMP(PostingListWriter *self) {
     OutStream_Close(ivars->post_temp_out);
 
     // Try to free up some memory.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
         PostingPool *pool = (PostingPool*)Vec_Fetch(ivars->pools, i);
         if (pool) { PostPool_Shrink(pool); }
     }
 
     // Write postings for each field.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->pools); i < max; i++) {
         PostingPool *pool = (PostingPool*)Vec_Delete(ivars->pools, i);
         if (pool) {
             // Write out content for each PostingPool.  Let each PostingPool

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/SegWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegWriter.c b/core/Lucy/Index/SegWriter.c
index 67c87e4..e67df82 100644
--- a/core/Lucy/Index/SegWriter.c
+++ b/core/Lucy/Index/SegWriter.c
@@ -115,7 +115,7 @@ void
 SegWriter_Add_Inverted_Doc_IMP(SegWriter *self, Inverter *inverter,
                                int32_t doc_id) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
         DataWriter *writer = (DataWriter*)Vec_Fetch(ivars->writers, i);
         DataWriter_Add_Inverted_Doc(writer, inverter, doc_id);
     }
@@ -140,7 +140,7 @@ SegWriter_Add_Segment_IMP(SegWriter *self, SegReader 
*reader,
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
 
     // Bulk add the slab of documents to the various writers.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
         DataWriter *writer = (DataWriter*)Vec_Fetch(ivars->writers, i);
         DataWriter_Add_Segment(writer, reader, doc_map);
     }
@@ -161,7 +161,7 @@ SegWriter_Merge_Segment_IMP(SegWriter *self, SegReader *reader,
     String   *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
 
     // Have all the sub-writers merge the segment.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
         DataWriter *writer = (DataWriter*)Vec_Fetch(ivars->writers, i);
         DataWriter_Merge_Segment(writer, reader, doc_map);
     }
@@ -181,7 +181,7 @@ SegWriter_Delete_Segment_IMP(SegWriter *self, SegReader *reader) {
     String   *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
 
     // Have all the sub-writers delete the segment.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
         DataWriter *writer = (DataWriter*)Vec_Fetch(ivars->writers, i);
         DataWriter_Delete_Segment(writer, reader);
     }
@@ -197,7 +197,7 @@ SegWriter_Finish_IMP(SegWriter *self) {
     String *seg_name = Seg_Get_Name(ivars->segment);
 
     // Finish off children.
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->writers); i < max; i++) {
         DataWriter *writer = (DataWriter*)Vec_Fetch(ivars->writers, i);
         DataWriter_Finish(writer);
     }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/Snapshot.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Snapshot.c b/core/Lucy/Index/Snapshot.c
index c43ffec..a67379b 100644
--- a/core/Lucy/Index/Snapshot.c
+++ b/core/Lucy/Index/Snapshot.c
@@ -141,7 +141,7 @@ Snapshot_Read_File_IMP(Snapshot *self, Folder *folder, String *path) {
             list = cleaned;
         }
         Hash_Clear(ivars->entries);
-        for (uint32_t i = 0, max = Vec_Get_Size(list); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(list); i < max; i++) {
             String *entry
                 = (String*)CERTIFY(Vec_Fetch(list, i), STRING);
             Hash_Store(ivars->entries, entry, (Obj*)CFISH_TRUE);
@@ -160,7 +160,7 @@ S_clean_segment_contents(Vector *orig) {
     // within segment directories being listed.  Filter these files because
     // they cause a problem with FilePurger.
     Vector *cleaned = Vec_new(Vec_Get_Size(orig));
-    for (uint32_t i = 0, max = Vec_Get_Size(orig); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(orig); i < max; i++) {
         String *name = (String*)Vec_Fetch(orig, i);
         if (!Seg_valid_seg_name(name)) {
             if (Str_Starts_With_Utf8(name, "seg_", 4)) {

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Index/SortWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortWriter.c b/core/Lucy/Index/SortWriter.c
index 34be6eb..1b937c3 100644
--- a/core/Lucy/Index/SortWriter.c
+++ b/core/Lucy/Index/SortWriter.c
@@ -152,7 +152,7 @@ SortWriter_Add_Inverted_Doc_IMP(SortWriter *self, Inverter *inverter,
     // flush all of them, then reset the counter which tracks memory
     // consumption.
     if ((size_t)Counter_Get_Value(ivars->counter) > ivars->mem_thresh) {
-        for (uint32_t i = 0; i < Vec_Get_Size(ivars->field_writers); i++) {
+        for (size_t i = 0; i < Vec_Get_Size(ivars->field_writers); i++) {
             SortFieldWriter *const field_writer
                 = (SortFieldWriter*)Vec_Fetch(ivars->field_writers, i);
             if (field_writer) { SortFieldWriter_Flush(field_writer); }
@@ -169,7 +169,7 @@ SortWriter_Add_Segment_IMP(SortWriter *self, SegReader *reader,
     Vector *fields = Schema_All_Fields(ivars->schema);
 
     // Proceed field-at-a-time, rather than doc-at-a-time.
-    for (uint32_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
         String *field = (String*)Vec_Fetch(fields, i);
         SortReader *sort_reader = (SortReader*)SegReader_Fetch(
                                       reader, Class_Get_Name(SORTREADER));
@@ -199,7 +199,7 @@ SortWriter_Finish_IMP(SortWriter *self) {
     // If we've either flushed or added segments, flush everything so that any
     // one field can use the entire margin up to mem_thresh.
     if (ivars->flush_at_finish) {
-        for (uint32_t i = 1, max = Vec_Get_Size(field_writers); i < max; i++) {
+        for (size_t i = 1, max = Vec_Get_Size(field_writers); i < max; i++) {
             SortFieldWriter *field_writer
                 = (SortFieldWriter*)Vec_Fetch(field_writers, i);
             if (field_writer) {

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Plan/Schema.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Plan/Schema.c b/core/Lucy/Plan/Schema.c
index 5f6c538..bfcbebe 100644
--- a/core/Lucy/Plan/Schema.c
+++ b/core/Lucy/Plan/Schema.c
@@ -85,7 +85,7 @@ Schema_Destroy_IMP(Schema *self) {
 static void
 S_add_unique(Vector *array, Obj *elem) {
     if (!elem) { return; }
-    for (uint32_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
         Obj *candidate = Vec_Fetch(array, i);
         if (!candidate) { continue; }
         if (elem == candidate) { return; }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Search/PhraseQuery.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Search/PhraseQuery.c b/core/Lucy/Search/PhraseQuery.c
index a453cfd..e8ddeea 100644
--- a/core/Lucy/Search/PhraseQuery.c
+++ b/core/Lucy/Search/PhraseQuery.c
@@ -68,7 +68,7 @@ static PhraseQuery*
 S_do_init(PhraseQuery *self, String *field, Vector *terms, float boost) {
     Query_init((Query*)self, boost);
     PhraseQueryIVARS *const ivars = PhraseQuery_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
         CERTIFY(Vec_Fetch(terms, i), OBJ);
     }
     ivars->field = field;
@@ -218,7 +218,7 @@ PhraseCompiler_init(PhraseCompiler *self, PhraseQuery *parent,
 
     // Store IDF for the phrase.
     ivars->idf = 0;
-    for (uint32_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
         Obj     *term     = Vec_Fetch(terms, i);
         int32_t  doc_max  = Searcher_Doc_Max(searcher);
         int32_t  doc_freq = Searcher_Doc_Freq(searcher, parent_ivars->field, term);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Search/PolyQuery.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Search/PolyQuery.c b/core/Lucy/Search/PolyQuery.c
index 0897c0f..c6ae993 100644
--- a/core/Lucy/Search/PolyQuery.c
+++ b/core/Lucy/Search/PolyQuery.c
@@ -164,7 +164,7 @@ PolyCompiler_Sum_Of_Squared_Weights_IMP(PolyCompiler *self) {
     float sum      = 0;
     float my_boost = PolyCompiler_Get_Boost(self);
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
         Compiler *child = (Compiler*)Vec_Fetch(ivars->children, i);
         sum += Compiler_Sum_Of_Squared_Weights(child);
     }
@@ -178,7 +178,7 @@ PolyCompiler_Sum_Of_Squared_Weights_IMP(PolyCompiler *self) {
 void
 PolyCompiler_Apply_Norm_Factor_IMP(PolyCompiler *self, float factor) {
     PolyCompilerIVARS *const ivars = PolyCompiler_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
         Compiler *child = (Compiler*)Vec_Fetch(ivars->children, i);
         Compiler_Apply_Norm_Factor(child, factor);
     }
@@ -189,7 +189,7 @@ PolyCompiler_Highlight_Spans_IMP(PolyCompiler *self, Searcher *searcher,
                                  DocVector *doc_vec, String *field) {
     PolyCompilerIVARS *const ivars = PolyCompiler_IVARS(self);
     Vector *spans = Vec_new(0);
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->children); i < max; i++) {
         Compiler *child = (Compiler*)Vec_Fetch(ivars->children, i);
         Vector *child_spans = Compiler_Highlight_Spans(child, searcher,
                                                        doc_vec, field);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Search/QueryParser.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Search/QueryParser.c b/core/Lucy/Search/QueryParser.c
index 40ba903..3042231 100644
--- a/core/Lucy/Search/QueryParser.c
+++ b/core/Lucy/Search/QueryParser.c
@@ -120,7 +120,7 @@ QParser_init(QueryParser *self, Schema *schema, Analyzer *analyzer,
 
     if (fields) {
         ivars->fields = Vec_Clone(fields);
-        for (uint32_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
             CERTIFY(Vec_Fetch(fields, i), STRING);
         }
         Vec_Sort(ivars->fields);
@@ -361,7 +361,7 @@ S_compose_inner_queries(QueryParser *self, Vector *elems,
     const int32_t default_occur = QParser_IVARS(self)->default_occur;
 
     // Generate all queries.  Apply any fields.
-    for (uint32_t i = Vec_Get_Size(elems); i--;) {
+    for (size_t i = Vec_Get_Size(elems); i--;) {
         String *field = default_field;
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i);
 
@@ -391,10 +391,10 @@ S_compose_inner_queries(QueryParser *self, Vector *elems,
 static void
 S_apply_plusses_and_negations(QueryParser *self, Vector *elems) {
     UNUSED_VAR(self);
-    for (uint32_t i = Vec_Get_Size(elems); i--;) {
+    for (size_t i = Vec_Get_Size(elems); i--;) {
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i);
         if (ParserElem_Get_Type(elem) == TOKEN_QUERY) {
-            for (uint32_t j = i; j--;) {
+            for (size_t j = i; j--;) {
                 ParserElem *prev = (ParserElem*)Vec_Fetch(elems, j);
                 uint32_t prev_type = ParserElem_Get_Type(prev);
                 if (prev_type == TOKEN_MINUS || prev_type == TOKEN_NOT) {
@@ -413,7 +413,7 @@ S_apply_plusses_and_negations(QueryParser *self, Vector *elems) {
 
 static void
 S_compose_not_queries(QueryParser *self, Vector *elems) {
-    for (uint32_t i = 0, max = Vec_Get_Size(elems); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(elems); i < max; i++) {
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i);
         if (ParserElem_Get_Type(elem) == TOKEN_QUERY
             && ParserElem_Negated(elem)
@@ -429,7 +429,7 @@ S_compose_not_queries(QueryParser *self, Vector *elems) {
 static void
 S_winnow_boolops(QueryParser *self, Vector *elems) {
     UNUSED_VAR(self);
-    for (uint32_t i = 0; i < Vec_Get_Size(elems); i++) {
+    for (size_t i = 0; i < Vec_Get_Size(elems); i++) {
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i);
         if (ParserElem_Get_Type(elem) != TOKEN_QUERY) {
             uint32_t num_to_zap = 0;
@@ -441,7 +441,7 @@ S_winnow_boolops(QueryParser *self, Vector *elems) {
             if (!following || ParserElem_Get_Type(following) != TOKEN_QUERY) {
                 num_to_zap = 1;
             }
-            for (uint32_t j = i + 1, jmax = Vec_Get_Size(elems); j < jmax; j++) {
+            for (size_t j = i + 1, jmax = Vec_Get_Size(elems); j < jmax; j++) {
                 ParserElem *maybe = (ParserElem*)Vec_Fetch(elems, j);
                 if (ParserElem_Get_Type(maybe) == TOKEN_QUERY) { break; }
                 else { num_to_zap++; }
@@ -456,7 +456,7 @@ static void
 S_compose_and_queries(QueryParser *self, Vector *elems) {
     const int32_t default_occur = QParser_IVARS(self)->default_occur;
 
-    for (uint32_t i = 0; i + 2 < Vec_Get_Size(elems); i++) {
+    for (size_t i = 0; i + 2 < Vec_Get_Size(elems); i++) {
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i + 1);
         if (ParserElem_Get_Type(elem) == TOKEN_AND) {
             ParserElem   *preceding  = (ParserElem*)Vec_Fetch(elems, i);
@@ -468,7 +468,7 @@ S_compose_and_queries(QueryParser *self, Vector *elems) {
             Vec_Push(children, INCREF(preceding_query));
 
             // Add following clauses.
-            for (uint32_t j = i + 1, jmax = Vec_Get_Size(elems);
+            for (size_t j = i + 1, jmax = Vec_Get_Size(elems);
                  j < jmax;
                  j += 2, num_to_zap += 2
                 ) {
@@ -503,7 +503,7 @@ static void
 S_compose_or_queries(QueryParser *self, Vector *elems) {
     const int32_t default_occur = QParser_IVARS(self)->default_occur;
 
-    for (uint32_t i = 0; i + 2 < Vec_Get_Size(elems); i++) {
+    for (size_t i = 0; i + 2 < Vec_Get_Size(elems); i++) {
         ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i + 1);
         if (ParserElem_Get_Type(elem) == TOKEN_OR) {
             ParserElem   *preceding  = (ParserElem*)Vec_Fetch(elems, i);
@@ -515,7 +515,7 @@ S_compose_or_queries(QueryParser *self, Vector *elems) {
             Vec_Push(children, INCREF(preceding_query));
 
             // Add following clauses.
-            for (uint32_t j = i + 1, jmax = Vec_Get_Size(elems);
+            for (size_t j = i + 1, jmax = Vec_Get_Size(elems);
                  j < jmax;
                  j += 2, num_to_zap += 2
                 ) {
@@ -668,7 +668,7 @@ S_has_valid_clauses(Query *query) {
     else if (Query_is_a(query, ORQUERY) || Query_is_a(query, ANDQUERY)) {
         PolyQuery *polyquery = (PolyQuery*)query;
         Vector    *children  = PolyQuery_Get_Children(polyquery);
-        for (uint32_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
             Query *child = (Query*)Vec_Fetch(children, i);
             if (S_has_valid_clauses(child)) {
                 return true;
@@ -698,7 +698,7 @@ S_do_prune(QueryParser *self, Query *query) {
         Vector    *children  = PolyQuery_Get_Children(polyquery);
 
         // Recurse.
-        for (uint32_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
             Query *child = (Query*)Vec_Fetch(children, i);
             S_do_prune(self, child);
         }
@@ -708,7 +708,7 @@ S_do_prune(QueryParser *self, Query *query) {
            ) {
             // Don't allow 'foo OR (-bar)'.
             Vector *children = PolyQuery_Get_Children(polyquery);
-            for (uint32_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
+            for (size_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
                 Query *child = (Query*)Vec_Fetch(children, i);
                 if (!S_has_valid_clauses(child)) {
                     Vec_Store(children, i, (Obj*)NoMatchQuery_new());
@@ -751,7 +751,7 @@ QParser_Expand_IMP(QueryParser *self, Query *query) {
         Vector *children = PolyQuery_Get_Children(polyquery);
         Vector *new_kids = Vec_new(Vec_Get_Size(children));
 
-        for (uint32_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(children); i < max; i++) {
             Query *child = (Query*)Vec_Fetch(children, i);
             Query *new_child = QParser_Expand(self, child); // recurse
             if (new_child) {
@@ -899,7 +899,7 @@ QParser_Expand_Leaf_IMP(QueryParser *self, Query *query) {
 
     CharBuf *unescape_buf = CB_new(Str_Get_Size(source_text));
     Vector  *queries      = Vec_new(Vec_Get_Size(fields));
-    for (uint32_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(fields); i < max; i++) {
         String   *field    = (String*)Vec_Fetch(fields, i);
         Analyzer *analyzer = ivars->analyzer
                              ? ivars->analyzer

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Search/SortSpec.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Search/SortSpec.c b/core/Lucy/Search/SortSpec.c
index bdd22e2..337a827 100644
--- a/core/Lucy/Search/SortSpec.c
+++ b/core/Lucy/Search/SortSpec.c
@@ -37,7 +37,7 @@ SortSpec*
 SortSpec_init(SortSpec *self, Vector *rules) {
     SortSpecIVARS *const ivars = SortSpec_IVARS(self);
     ivars->rules = Vec_Clone(rules);
-    for (int32_t i = 0, max = Vec_Get_Size(rules); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(rules); i < max; i++) {
         SortRule *rule = (SortRule*)Vec_Fetch(rules, i);
         CERTIFY(rule, SORTRULE);
     }

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Store/CompoundFileWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/CompoundFileWriter.c b/core/Lucy/Store/CompoundFileWriter.c
index 9cb496a..af54dd6 100644
--- a/core/Lucy/Store/CompoundFileWriter.c
+++ b/core/Lucy/Store/CompoundFileWriter.c
@@ -110,7 +110,7 @@ S_do_consolidate(CompoundFileWriter *self, CompoundFileWriterIVARS *ivars) {
                     (Obj*)Str_newf("%i32", CFWriter_current_file_format));
 
     Vec_Sort(files);
-    for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
         String *infilename = (String*)Vec_Fetch(files, i);
 
         if (!Str_Ends_With_Utf8(infilename, ".json", 5)) {
@@ -167,7 +167,7 @@ S_do_consolidate(CompoundFileWriter *self, CompoundFileWriterIVARS *ivars) {
     DECREF(iter);
     */
     DECREF(sub_files);
-    for (uint32_t i = 0, max = Vec_Get_Size(merged); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(merged); i < max; i++) {
         String *merged_file = (String*)Vec_Fetch(merged, i);
         if (!Folder_Delete(folder, merged_file)) {
             String *mess = MAKE_MESS("Can't delete '%o'", merged_file);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Store/Folder.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Store/Folder.c b/core/Lucy/Store/Folder.c
index 774fd74..c9149da 100644
--- a/core/Lucy/Store/Folder.c
+++ b/core/Lucy/Store/Folder.c
@@ -181,14 +181,14 @@ Folder_Delete_Tree_IMP(Folder *self, String *path) {
                     }
                     DECREF(entry);
                 }
-                for (uint32_t i = 0, max = Vec_Get_Size(dirs); i < max; i++) {
+                for (size_t i = 0, max = Vec_Get_Size(dirs); i < max; i++) {
                     String *name = (String*)Vec_Fetch(files, i);
                     bool success = Folder_Delete_Tree(inner_folder, name);
                     if (!success && Folder_Local_Exists(inner_folder, name)) {
                         break;
                     }
                 }
-                for (uint32_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
+                for (size_t i = 0, max = Vec_Get_Size(files); i < max; i++) {
                     String *name = (String*)Vec_Fetch(files, i);
                     bool success = Folder_Local_Delete(inner_folder, name);
                     if (!success && Folder_Local_Exists(inner_folder, name)) {

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Test/Analysis/TestNormalizer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Analysis/TestNormalizer.c b/core/Lucy/Test/Analysis/TestNormalizer.c
index 011e8e2..d6028dc 100644
--- a/core/Lucy/Test/Analysis/TestNormalizer.c
+++ b/core/Lucy/Test/Analysis/TestNormalizer.c
@@ -85,7 +85,7 @@ test_normalization(TestBatchRunner *runner) {
     Vector *tests = (Vector*)Json_slurp_json((Folder*)modules_folder, path);
     if (!tests) { RETHROW(Err_get_error()); }
 
-    for (uint32_t i = 0, max = Vec_Get_Size(tests); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(tests); i < max; i++) {
         Hash *test = (Hash*)Vec_Fetch(tests, i);
         String *form = (String*)Hash_Fetch_Utf8(
                             test, "normalization_form", 18);
@@ -96,7 +96,7 @@ test_normalization(TestBatchRunner *runner) {
         Normalizer *normalizer = Normalizer_new(form, case_fold, strip_accents);
         Vector *words = (Vector*)Hash_Fetch_Utf8(test, "words", 5);
         Vector *norms = (Vector*)Hash_Fetch_Utf8(test, "norms", 5);
-        for (uint32_t j = 0, max = Vec_Get_Size(words); j < max; j++) {
+        for (size_t j = 0, max = Vec_Get_Size(words); j < max; j++) {
             String *word = (String*)Vec_Fetch(words, j);
             Vector *got  = Normalizer_Split(normalizer, word);
             String *norm = (String*)Vec_Fetch(got, 0);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Test/Analysis/TestSnowballStemmer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Analysis/TestSnowballStemmer.c b/core/Lucy/Test/Analysis/TestSnowballStemmer.c
index 5889247..4da377f 100644
--- a/core/Lucy/Test/Analysis/TestSnowballStemmer.c
+++ b/core/Lucy/Test/Analysis/TestSnowballStemmer.c
@@ -81,7 +81,7 @@ test_stemming(TestBatchRunner *runner) {
         Vector *words = (Vector*)Hash_Fetch_Utf8(lang_data, "words", 5);
         Vector *stems = (Vector*)Hash_Fetch_Utf8(lang_data, "stems", 5);
         SnowballStemmer *stemmer = SnowStemmer_new(iso);
-        for (uint32_t i = 0, max = Vec_Get_Size(words); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(words); i < max; i++) {
             String *word  = (String*)Vec_Fetch(words, i);
             char   *wstr  = Str_To_Utf8(word);
             Vector *got   = SnowStemmer_Split(stemmer, word);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Test/Index/TestSortWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Index/TestSortWriter.c b/core/Lucy/Test/Index/TestSortWriter.c
index a354bfe..539cc9e 100644
--- a/core/Lucy/Test/Index/TestSortWriter.c
+++ b/core/Lucy/Test/Index/TestSortWriter.c
@@ -251,7 +251,7 @@ test_sort_writer(TestBatchRunner *runner) {
     {
         Vector *filenames = RAMFolder_List_R(folder, NULL);
         int num_old_seg_files = 0;
-        for (uint32_t i = 0, size = Vec_Get_Size(filenames); i < size; ++i) {
+        for (size_t i = 0, size = Vec_Get_Size(filenames); i < size; ++i) {
             String *filename = (String*)Vec_Fetch(filenames, i);
             if (Str_Contains_Utf8(filename, "seg_1", 5)
                 || Str_Contains_Utf8(filename, "seg_2", 5)

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Test/Search/TestQueryParserSyntax.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Search/TestQueryParserSyntax.c b/core/Lucy/Test/Search/TestQueryParserSyntax.c
index a0be919..1cc4680 100644
--- a/core/Lucy/Test/Search/TestQueryParserSyntax.c
+++ b/core/Lucy/Test/Search/TestQueryParserSyntax.c
@@ -93,7 +93,7 @@ build_index() {
 
     // Index documents.
     Vector *doc_set = TestUtils_doc_set();
-    for (uint32_t i = 0; i < Vec_Get_Size(doc_set); ++i) {
+    for (size_t i = 0; i < Vec_Get_Size(doc_set); ++i) {
         String *content_string = (String*)Vec_Fetch(doc_set, i);
         Doc *doc = Doc_new(NULL, 0);
         Doc_Store(doc, plain_str, (Obj*)content_string);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Test/Store/TestFSFolder.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Test/Store/TestFSFolder.c b/core/Lucy/Test/Store/TestFSFolder.c
index 22e3a2e..03eef57 100644
--- a/core/Lucy/Test/Store/TestFSFolder.c
+++ b/core/Lucy/Test/Store/TestFSFolder.c
@@ -127,7 +127,7 @@ test_protect_symlinks(TestBatchRunner *runner) {
     else {
         Vector *list = FSFolder_List_R(folder, NULL);
         bool saw_bazooka_boffo = false;
-        for (uint32_t i = 0, max = Vec_Get_Size(list); i < max; i++) {
+        for (size_t i = 0, max = Vec_Get_Size(list); i < max; i++) {
             String *entry = (String*)Vec_Fetch(list, i);
             if (Str_Ends_With_Utf8(entry, "bazooka/boffo", 13)) {
                 saw_bazooka_boffo = true;

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Util/Freezer.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Util/Freezer.c b/core/Lucy/Util/Freezer.c
index e032baa..2a71de0 100644
--- a/core/Lucy/Util/Freezer.c
+++ b/core/Lucy/Util/Freezer.c
@@ -307,7 +307,7 @@ Freezer_read_hash(InStream *instream) {
 static Obj*
 S_dump_array(Vector *array) {
     Vector *dump = Vec_new(Vec_Get_Size(array));
-    for (uint32_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(array); i < max; i++) {
         Obj *elem = Vec_Fetch(array, i);
         if (elem) {
             Vec_Store(dump, i, Freezer_dump(elem));
@@ -451,7 +451,7 @@ Obj*
 S_load_from_array(Vector *dump) {
     Vector *loaded = Vec_new(Vec_Get_Size(dump));
 
-    for (uint32_t i = 0, max = Vec_Get_Size(dump); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(dump); i < max; i++) {
         Obj *elem_dump = Vec_Fetch(dump, i);
         if (elem_dump) {
             Vec_Store(loaded, i, Freezer_load(elem_dump));

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/Lucy/Util/SortExternal.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Util/SortExternal.c b/core/Lucy/Util/SortExternal.c
index 976cab4..a9b923c 100644
--- a/core/Lucy/Util/SortExternal.c
+++ b/core/Lucy/Util/SortExternal.c
@@ -197,7 +197,7 @@ SortEx_Shrink_IMP(SortExternal *self) {
     FREEMEM(ivars->scratch);
     ivars->scratch = NULL;
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->runs); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->runs); i < max; i++) {
         SortExternal *run = (SortExternal*)Vec_Fetch(ivars->runs, i);
         SortEx_Shrink(run);
     }
@@ -231,7 +231,7 @@ static Obj**
 S_find_endpost(SortExternal *self, SortExternalIVARS *ivars) {
     Obj **endpost = NULL;
 
-    for (uint32_t i = 0, max = Vec_Get_Size(ivars->runs); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(ivars->runs); i < max; i++) {
         // Get a run and retrieve the last item in its buffer.
         SortExternal *const run = (SortExternal*)Vec_Fetch(ivars->runs, i);
         SortExternalIVARS *const run_ivars = SortEx_IVARS(run);

http://git-wip-us.apache.org/repos/asf/lucy/blob/e2235fef/core/LucyX/Search/ProximityQuery.c
----------------------------------------------------------------------
diff --git a/core/LucyX/Search/ProximityQuery.c b/core/LucyX/Search/ProximityQuery.c
index 3e0337d..e4c4fb5 100644
--- a/core/LucyX/Search/ProximityQuery.c
+++ b/core/LucyX/Search/ProximityQuery.c
@@ -72,7 +72,7 @@ S_do_init(ProximityQuery *self, String *field, Vector *terms, float boost,
           uint32_t within) {
     Query_init((Query*)self, boost);
     ProximityQueryIVARS *const ivars = ProximityQuery_IVARS(self);
-    for (uint32_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
         CERTIFY(Vec_Fetch(terms, i), OBJ);
     }
     ivars->field  = field;
@@ -239,7 +239,7 @@ ProximityCompiler_init(ProximityCompiler *self, ProximityQuery *parent,
 
     // Store IDF for the phrase.
     ivars->idf = 0;
-    for (uint32_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
+    for (size_t i = 0, max = Vec_Get_Size(terms); i < max; i++) {
         Obj *term = Vec_Fetch(terms, i);
         int32_t doc_max  = Searcher_Doc_Max(searcher);
         int32_t doc_freq

Reply via email to