commit 13e266d3aa6a02e8a04a2157da39cc60667c8a64
Author: synhershko <synhershko@users.sourceforge.net>
Date:   Thu Jun 24 17:19:20 2010 +0300

    New test by Kostka Bořivoj and an iteration fix
    
    The delete loop in IndexWriter::Internal::applyDeletesSelectively never
    advanced its iterator, so it kept deleting the same document id and never
    terminated; it now steps through every collected id.
    
    Ported from https://issues.apache.org/jira/secure/attachment/12370935/LUCENE-1072.take2.patch

diff --git a/src/core/CLucene/index/IndexWriter.cpp b/src/core/CLucene/index/IndexWriter.cpp
index 375d071..9c1cb23 100644
--- a/src/core/CLucene/index/IndexWriter.cpp
+++ b/src/core/CLucene/index/IndexWriter.cpp
@@ -2290,7 +2290,7 @@ void IndexWriter::Internal::applyDeletesSelectively(const DocumentsWriter::TermN
 
   if (deleteIds.size() > 0) {
     vector<int32_t>::const_iterator iter2 = deleteIds.begin();
-    while(iter2 != deleteIds.end() )
+    for ( ; iter2 != deleteIds.end(); ++iter2 )  // advance to the next deleted doc id
       reader->deleteDocument(*iter2);
   }
 }
diff --git a/src/test/index/TestIndexWriter.cpp b/src/test/index/TestIndexWriter.cpp
index 416bb1a..fd2df33 100644
--- a/src/test/index/TestIndexWriter.cpp
+++ b/src/test/index/TestIndexWriter.cpp
@@ -258,16 +258,85 @@ void testHashingBug(CuTest *tc){
   _CL_DECREF(&dir);
 }
 
+void testExceptionFromTokenStream(CuTest *tc) {
+
+    class TokenFilterWithException : public TokenFilter
+    {
+    private:
+        int count;
+
+    public:
+        TokenFilterWithException(TokenStream * in) : 
+          TokenFilter(in, true), count(0) {};
+
+          Token* next(Token * pToken) {
+              if (count++ == 5) {
+                  _CLTHROWA(CL_ERR_IO, "TokenFilterWithException testing IO exception");         
+              }
+              return input->next(pToken);
+          };
+    };
+
+    class AnalyzerWithException : public Analyzer
+    {
+    public:
+        TokenStream* tokenStream(const TCHAR * fieldName, Reader * reader) {
+            return _CLNEW TokenFilterWithException(_CLNEW WhitespaceTokenizer(reader));
+        };
+    };
+
+    RAMDirectory * dir = _CLNEW RAMDirectory();
+    AnalyzerWithException a;
+    IndexWriter * writer = _CLNEW IndexWriter(dir, &a, true);
+
+    Document* doc = _CLNEW Document();
+    doc->add(* _CLNEW Field(_T("content"), _T("aa bb cc dd ee ff gg hh ii"), Field::STORE_NO | Field::INDEX_TOKENIZED));
+    try {
+        writer->addDocument(doc);
+        CuFail(tc, _T("did not hit expected exception"));
+    } catch (CLuceneError& e) { // expected: the token filter throws while tokenizing this document
+    }
+    _CLLDELETE(doc);
+
+    // Make sure we can add another normal document
+    doc = _CLNEW Document();
+    doc->add(* _CLNEW Field(_T("content"), _T("aa bb cc dd"), Field::STORE_NO | Field::INDEX_TOKENIZED));
+    writer->addDocument(doc);
+    _CLLDELETE(doc);
+
+    // Make sure we can add another normal document
+    doc = _CLNEW Document();
+    doc->add(* _CLNEW Field(_T("content"), _T("aa bb cc dd"), Field::STORE_NO | Field::INDEX_TOKENIZED));
+    writer->addDocument(doc);
+
+    writer->close();
+    _CLLDELETE(writer);
+    _CLLDELETE(doc);
+
+    /*
+    TODO:
+    IndexReader reader = IndexReader.open(dir);
+    assertEquals(reader.docFreq(new Term("content", "aa")), 3);
+    assertEquals(reader.docFreq(new Term("content", "gg")), 0);
+    reader.close();
+    */
+
+    dir->close();
+    _CLLDELETE(dir);
+}
 
 CuSuite *testindexwriter(void)
 {
     CuSuite *suite = CuSuiteNew(_T("CLucene IndexWriter Test"));
+
     SUITE_ADD_TEST(suite, testHashingBug);
     SUITE_ADD_TEST(suite, testAddIndexes);
     SUITE_ADD_TEST(suite, testIWmergeSegments1);
     SUITE_ADD_TEST(suite, testIWmergeSegments2);
     SUITE_ADD_TEST(suite, testIWmergePhraseSegments);
 
+    SUITE_ADD_TEST(suite, testExceptionFromTokenStream);    // LUCENE-1072
+
   return suite;
 }
 // EOF
