[ 
https://issues.apache.org/jira/browse/OAK-10127?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17743531#comment-17743531
 ] 

Julian Reschke edited comment on OAK-10127 at 2/18/26 3:41 PM:
---------------------------------------------------------------

trunk: (1.54.0) 
[fbf3df6e5b|https://github.com/apache/jackrabbit-oak/commit/fbf3df6e5b3b175f7c3fb7af64e2dfcbb773161b]


was (Author: reschke):
trunk: 
[fbf3df6e5b|https://github.com/apache/jackrabbit-oak/commit/fbf3df6e5b3b175f7c3fb7af64e2dfcbb773161b]

> Log warn message when MongoDB document is big
> ---------------------------------------------
>
>                 Key: OAK-10127
>                 URL: https://issues.apache.org/jira/browse/OAK-10127
>             Project: Jackrabbit Oak
>          Issue Type: Task
>          Components: documentmk
>            Reporter: Ankita Agarwal
>            Priority: Major
>             Fix For: 1.54.0
>
>         Attachments: OAK-10127.patch
>
>
> MongoDocumentStore has retry logic in the document find implementation. 
> MongoDocumentStore should throw an exception with more details when a 
> document larger than 16 MB is updated.
> From the logs/exception below it is not clear what document we failed to 
> insert and what the payload was:
> {code:java}
> 05:59:47.518 [main] ERROR com.adobe.granite.indexing.tool.Main - Can't 
> perform operation
> org.bson.BsonMaximumSizeExceededException: Payload document size is larger 
> than maximum of 16777216.
>         at 
> com.mongodb.internal.connection.BsonWriterHelper.writePayload(BsonWriterHelper.java:68)
>         at 
> com.mongodb.internal.connection.CommandMessage.encodeMessageBodyWithMetadata(CommandMessage.java:147)
>         at 
> com.mongodb.internal.connection.RequestMessage.encode(RequestMessage.java:138)
>         at 
> com.mongodb.internal.connection.CommandMessage.encode(CommandMessage.java:61)
>         at 
> com.mongodb.internal.connection.InternalStreamConnection.sendAndReceive(InternalStreamConnection.java:248)
>         at 
> com.mongodb.internal.connection.UsageTrackingInternalConnection.sendAndReceive(UsageTrackingInternalConnection.java:99)
>         at 
> com.mongodb.internal.connection.DefaultConnectionPool$PooledConnection.sendAndReceive(DefaultConnectionPool.java:450)
>         at 
> com.mongodb.internal.connection.CommandProtocolImpl.execute(CommandProtocolImpl.java:72)
>         at 
> com.mongodb.internal.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:226)
>         at 
> com.mongodb.internal.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:269)
>         at 
> com.mongodb.internal.connection.DefaultServerConnection.command(DefaultServerConnection.java:131)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation.executeCommand(MixedBulkWriteOperation.java:435)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation.executeBulkWriteBatch(MixedBulkWriteOperation.java:261)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation.access$700(MixedBulkWriteOperation.java:72)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:205)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:196)
>         at 
> com.mongodb.operation.OperationHelper.withReleasableConnection(OperationHelper.java:501)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:196)
>         at 
> com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:71)
>         at 
> com.mongodb.client.internal.MongoClientDelegate$DelegateOperationExecutor.execute(MongoClientDelegate.java:211)
>         at 
> com.mongodb.client.internal.MongoCollectionImpl.executeInsertMany(MongoCollectionImpl.java:529)
>         at 
> com.mongodb.client.internal.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:524)
>         at 
> com.mongodb.client.internal.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:518)
>         at 
> org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.lambda$create$11(MongoDocumentStore.java:1366)
>         at 
> org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.execute(MongoDocumentStore.java:1914)
>         at 
> org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore.create(MongoDocumentStore.java:1364)
>         at 
> org.apache.jackrabbit.oak.plugins.document.util.LeaseCheckDocumentStoreWrapper.create(LeaseCheckDocumentStoreWrapper.java:120)
>         at 
> org.apache.jackrabbit.oak.plugins.document.Commit.applyToDocumentStore(Commit.java:349)
>         at 
> org.apache.jackrabbit.oak.plugins.document.Commit.applyToDocumentStoreWithTiming(Commit.java:278)
>         at 
> org.apache.jackrabbit.oak.plugins.document.Commit.prepare(Commit.java:245)
>         at 
> org.apache.jackrabbit.oak.plugins.document.Commit.apply(Commit.java:209)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.persist(DocumentNodeStoreBranch.java:321)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.persist(DocumentNodeStoreBranch.java:283)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.access$500(DocumentNodeStoreBranch.java:57)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$Persisted.persistTransientHead(DocumentNodeStoreBranch.java:719)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$Persisted.access$000(DocumentNodeStoreBranch.java:619)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$BranchState.persist(DocumentNodeStoreBranch.java:365)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$InMemory.setRoot(DocumentNodeStoreBranch.java:502)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.setRoot(DocumentNodeStoreBranch.java:113)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.purge(DocumentRootBuilder.java:185)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.getNodeState(DocumentRootBuilder.java:113)
>         at 
> org.apache.jackrabbit.oak.spi.commit.EditorHook.processCommit(EditorHook.java:56)
>         at 
> org.apache.jackrabbit.oak.spi.commit.CompositeHook.processCommit(CompositeHook.java:60)
>         at 
> org.apache.jackrabbit.oak.plugins.document.TimingHook.processCommit(TimingHook.java:59)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$InMemory.merge(DocumentNodeStoreBranch.java:542)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge0(DocumentNodeStoreBranch.java:197)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge(DocumentNodeStoreBranch.java:129)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.merge(DocumentRootBuilder.java:170)
>         at 
> org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.merge(DocumentNodeStore.java:2001)
>         at 
> org.apache.jackrabbit.oak.plugins.index.importer.NodeStoreUtils.mergeWithConcurrentCheck(NodeStoreUtils.java:65)
>         at 
> org.apache.jackrabbit.oak.plugins.index.importer.IndexImporter.importIndexData(IndexImporter.java:223)
>         at 
> org.apache.jackrabbit.oak.plugins.index.importer.IndexImporter.runWithRetry(IndexImporter.java:469)
>         at 
> org.apache.jackrabbit.oak.plugins.index.importer.IndexImporter.importIndex(IndexImporter.java:127)
>         at 
> org.apache.jackrabbit.oak.index.IndexImporterSupportBase.importIndex(IndexImporterSupportBase.java:45)
>         at 
> org.apache.jackrabbit.oak.index.IndexImporter.importIndex(IndexImporter.java:30)
>         at 
> com.adobe.granite.indexing.tool.ImportIndexCmd.run(ImportIndexCmd.java:108)
>         at com.adobe.granite.indexing.tool.Main.main(Main.java:100)
> {code}
>        



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to