This is an automated email from the ASF dual-hosted git repository.
reschke pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new 5069dfdf5f OAK-12113: MongoDocumentStore: BsonException should be
wrapped as DocumentStoreException (#2771)
5069dfdf5f is described below
commit 5069dfdf5f307d5996050872a2a51380d186fcec
Author: Julian Reschke <[email protected]>
AuthorDate: Wed Mar 4 17:21:31 2026 +0100
OAK-12113: MongoDocumentStore: BsonException should be wrapped as
DocumentStoreException (#2771)
---
.../plugins/document/mongo/MongoDocumentStore.java | 50 ++++++++++++----------
1 file changed, 28 insertions(+), 22 deletions(-)
diff --git
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
index 756edc8ce0..312ba26bdb 100644
---
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
+++
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
@@ -1652,28 +1652,7 @@ public class MongoDocumentStore implements DocumentStore
{
failedUpdates.add(bulkIds[err.getIndex()]);
}
} catch (BSONException bsonException) {
- LOG.error("bulkUpdate of size {} failed with: {}",
updateOps.size(),
- bsonException.getMessage(), bsonException);
-
- // add diagnostics
- String idOfbiggestUpdate = "";
- int estimatedSizeOfBiggestUpdate = 0;
-
- for (UpdateOp updateOp : updateOps) {
- String id = updateOp.getId();
- // this could be made more precise by measuring the BSON
serialization of
- // conditions and updates
- int estimatedSize = updateOp.toString().length();
- LOG.debug("after bulk write: string serialization of changes
for id={} had an approximate size of {}",
- id, estimatedSize);
- if (estimatedSize > estimatedSizeOfBiggestUpdate) {
- idOfbiggestUpdate = id;
- estimatedSizeOfBiggestUpdate = estimatedSize;
- }
- }
- LOG.error("bulkUpdate of size {} failed with: {}; biggest update
was for i={} with approximate size of {}",
- updateOps.size(), bsonException.getMessage(),
idOfbiggestUpdate, estimatedSizeOfBiggestUpdate,
- bsonException);
+ diagnoseAndLogBSONException(updateOps, bsonException);
// rethrow
throw bsonException;
}
@@ -2067,6 +2046,33 @@ public class MongoDocumentStore implements DocumentStore
{
return Filters.eq(Document.ID, key);
}
+ // given a BSONException, log estimated sizes for each update, also
determining the biggest one
+ // this can help to find out whether retries in smaller batches could have
helped (see OAK-12104)
+ private static void
diagnoseAndLogBSONException(java.util.Collection<UpdateOp> updateOps,
BSONException bsonException) {
+ LOG.error("bulkUpdate of size {} failed with: {}", updateOps.size(),
+ bsonException.getMessage(), bsonException);
+
+ // add diagnostics
+ String idOfbiggestUpdate = "";
+ int estimatedSizeOfBiggestUpdate = 0;
+
+ for (UpdateOp updateOp : updateOps) {
+ String id = updateOp.getId();
+ // this could be made more precise by measuring the BSON
serialization of
+ // conditions and updates
+ int estimatedSize = updateOp.toString().length();
+ LOG.debug("after bulk write: string serialization of changes for
id={} had an approximate size of {}",
+ id, estimatedSize);
+ if (estimatedSize > estimatedSizeOfBiggestUpdate) {
+ idOfbiggestUpdate = id;
+ estimatedSizeOfBiggestUpdate = estimatedSize;
+ }
+ }
+ LOG.error("bulkUpdate of size {} failed with: {}; biggest update was
for i={} with approximate size of {}",
+ updateOps.size(), bsonException.getMessage(),
idOfbiggestUpdate, estimatedSizeOfBiggestUpdate,
+ bsonException);
+ }
+
@Override
public void dispose() {
connection.close();