This is an automated email from the ASF dual-hosted git repository.
rzo1 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/opennlp-sandbox.git
The following commit(s) were added to refs/heads/main by this push:
     new c28a673  OPENNLP-1737 - Allow configuration of the inbound message size (#284)
c28a673 is described below
commit c28a6737bc7a9bc2ed07b1894caedf846a876f1e
Author: DaryanIbe <[email protected]>
AuthorDate: Tue May 27 21:21:25 2025 +0200
OPENNLP-1737 - Allow configuration of the inbound message size (#284)
* OPENNLP-1737 - Allow configuration of the inbound message size
* Remove inline comment
---------
Co-authored-by: Daryan Ibe <[email protected]>
Co-authored-by: Richard Zowalla <[email protected]>
---
.../opennlp-grpc-service/src/main/java/opennlp/OpenNLPServer.java | 7 ++++++-
.../opennlp-grpc-service/src/test/resources/config-test.ini | 3 ++-
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/opennlp-grpc/opennlp-grpc-service/src/main/java/opennlp/OpenNLPServer.java b/opennlp-grpc/opennlp-grpc-service/src/main/java/opennlp/OpenNLPServer.java
index a51dcbe..c498d8b 100644
--- a/opennlp-grpc/opennlp-grpc-service/src/main/java/opennlp/OpenNLPServer.java
+++ b/opennlp-grpc/opennlp-grpc-service/src/main/java/opennlp/OpenNLPServer.java
@@ -143,10 +143,15 @@ public class OpenNLPServer implements Callable<Integer> {
Boolean.parseBoolean(
configuration.getOrDefault("server.enable_reflection", "false"));
+    final int maxInboundMessageSize =
+        Integer.parseInt(
+            (configuration.getOrDefault("server.max_inbound_message_size", "10485760"))); // 10 MB
+
final ServerBuilder<?> builder = ServerBuilder.forPort(port)
.addService(new PosTaggerService(configuration))
.addService(new TokenizerService(configuration))
- .addService(new SentenceDetectorService(configuration));
+ .addService(new SentenceDetectorService(configuration))
+ .maxInboundMessageSize(maxInboundMessageSize);
if (enableReflection) {
builder.addService(ProtoReflectionServiceV1.newInstance());
diff --git a/opennlp-grpc/opennlp-grpc-service/src/test/resources/config-test.ini b/opennlp-grpc/opennlp-grpc-service/src/test/resources/config-test.ini
index 842cc4b..cb891c7 100644
--- a/opennlp-grpc/opennlp-grpc-service/src/test/resources/config-test.ini
+++ b/opennlp-grpc/opennlp-grpc-service/src/test/resources/config-test.ini
@@ -17,9 +17,10 @@
# Set to true to enable gRPC server reflection, see https://grpc.io/docs/guides/reflection/
server.enable_reflection = false
+server.max_inbound_message_size=10485760
model.location=target/test-classes/models
model.recursive=true
model.pos.wildcard.pattern=opennlp-models-pos-*.jar
model.tokenizer.wildcard.pattern=opennlp-models-tokenizer-*.jar
-model.sentdetect.wildcard.pattern=opennlp-models-sentdetect-*.jar
+model.sentdetect.wildcard.pattern=opennlp-models-sentdetect-*.jar
\ No newline at end of file
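
Note: the new server.max_inbound_message_size setting only raises the server-side limit; a caller may also need to raise its own inbound limit. Below is a minimal client-side sketch using grpc-java's ManagedChannelBuilder; the host, port (7071), and the 20 MB value are illustrative assumptions and are not part of this commit.

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public class InboundLimitClientSketch {

  public static void main(String[] args) {
    // Hypothetical target; host and port are assumptions for this sketch.
    ManagedChannel channel = ManagedChannelBuilder
        .forAddress("localhost", 7071)
        .usePlaintext()
        // Raise the client-side inbound limit (20 MB here, an illustrative value)
        // so large responses are not rejected with RESOURCE_EXHAUSTED.
        .maxInboundMessageSize(20 * 1024 * 1024)
        .build();

    // ... create OpenNLP gRPC stubs on this channel and issue calls ...

    channel.shutdown();
  }
}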