reschke commented on code in PR #2409:
URL: https://github.com/apache/jackrabbit-oak/pull/2409#discussion_r2382515865
##########
oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java:
##########
@@ -280,39 +240,63 @@ public InputStream read(DataIdentifier identifier) throws
DataStoreException {
try {
Thread.currentThread().setContextClassLoader(
getClass().getClassLoader());
- CloudBlockBlob blob =
getAzureContainer().getBlockBlobReference(key);
+ BlockBlobClient blob =
getAzureContainer().getBlobClient(key).getBlockBlobClient();
if (!blob.exists()) {
throw new DataStoreException(String.format("Trying to read
missing blob. identifier=%s", key));
}
InputStream is = blob.openInputStream();
LOG.debug("Got input stream for blob. identifier={} duration={}",
key, (System.currentTimeMillis() - start));
if (LOG_STREAMS_DOWNLOAD.isDebugEnabled()) {
- // Log message, with exception so we can get a trace to see
where the call came from
+ // Log message, with exception, so we can get a trace to see
where the call came from
LOG_STREAMS_DOWNLOAD.debug("Binary downloaded from Azure Blob
Storage - identifier={}", key, new Exception());
}
return is;
- }
- catch (StorageException e) {
+ } catch (BlobStorageException e) {
LOG.info("Error reading blob. identifier={}", key);
throw new DataStoreException(String.format("Cannot read blob.
identifier=%s", key), e);
- }
- catch (URISyntaxException e) {
- LOG.debug("Error reading blob. identifier={}", key);
- throw new DataStoreException(String.format("Cannot read blob.
identifier=%s", key), e);
} finally {
if (contextClassLoader != null) {
Thread.currentThread().setContextClassLoader(contextClassLoader);
}
}
}
+ private void uploadBlob(BlockBlobClient client, File file, long len, long
start, String key) throws IOException {
+
+ boolean useBufferedStream = len < AZURE_BLOB_BUFFERED_STREAM_THRESHOLD;
+ try (InputStream in = useBufferedStream ?
+ new BufferedInputStream(new FileInputStream(file))
+ : new FileInputStream(file)) {
+
+ ParallelTransferOptions parallelTransferOptions = new
ParallelTransferOptions()
+ .setBlockSizeLong(len)
+ .setMaxConcurrency(concurrentRequestCount)
+
.setMaxSingleUploadSizeLong(AZURE_BLOB_MAX_SINGLE_PUT_UPLOAD_SIZE);
+ BlobUploadFromFileOptions options = new
BlobUploadFromFileOptions(file.toString());
+ options.setParallelTransferOptions(parallelTransferOptions);
+ try {
+ BlobClient blobClient =
client.getContainerClient().getBlobClient(key);
+ Response<BlockBlobItem> blockBlob =
blobClient.uploadFromFileWithResponse(options, null, null);
+ LOG.debug("Upload status is {} for blob {}",
blockBlob.getStatusCode(), key);
+ } catch (UncheckedIOException ex) {
+ System.err.printf("Failed to upload from file: %s%n",
ex.getMessage());
+ throw new IOException("Failed to upload blob: " + key, ex);
+ }
+ LOG.debug("Blob created. identifier={} length={} duration={}
buffered={}", key, len, (System.currentTimeMillis() - start),
useBufferedStream);
+ if (LOG_STREAMS_UPLOAD.isDebugEnabled()) {
+ // Log message, with exception, so we can get a trace to see
where the call came from
+ LOG_STREAMS_UPLOAD.debug("Binary uploaded to Azure Blob
Storage - identifier={}", key, new Exception());
+ }
+ }
+ }
+
@Override
public void write(DataIdentifier identifier, File file) throws
DataStoreException {
- if (null == identifier) {
+ if (identifier == null) {
Review Comment:
These are pointless here; Java 17 will produce an NPE with the same details.
If you really want to throw at the start, just use Objects.requireNonNull.
##########
oak-run/pom.xml:
##########
@@ -336,10 +336,6 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
Review Comment:
Why are you removing these dependencies?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]