fabriziofortino commented on code in PR #587: URL: https://github.com/apache/jackrabbit-oak/pull/587#discussion_r908476967
########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitter.java: ########## @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.Stack; +import java.util.stream.Collectors; + 
+import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_LZ4; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_ZIP; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.PROP_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName; + +/** + * This class is being used when {@link FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX} is set to true. + * It will split a flat file safely by checking the index definitions. An entry is considered safe to split if only + * none of the parent directories contains nodes in indexRule and aggregate fields of the provided index definitions. 
+ */ +public class FlatFileSplitter { + private static final Logger log = LoggerFactory.getLogger(FlatFileSplitter.class); Review Comment: this should be uppercase ```suggestion private static final Logger LOG = LoggerFactory.getLogger(FlatFileSplitter.class); ``` ########## oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/sort/ExternalSort.java: ########## @@ -17,6 +17,11 @@ package org.apache.jackrabbit.oak.commons.sort; // filename: ExternalSort.java Review Comment: I would remove this comment ```suggestion ``` ########## oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/Compression.java: ########## @@ -0,0 +1,68 @@ +package org.apache.jackrabbit.oak.commons; Review Comment: missing license ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitter.java: ########## @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.Stack; +import java.util.stream.Collectors; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_LZ4; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_ZIP; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.PROP_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName; + +/** + * This class 
is being used when {@link FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX} is set to true. + * It will split a flat file safely by checking the index definitions. An entry is considered safe to split if only + * none of the parent directories contains nodes in indexRule and aggregate fields of the provided index definitions. + */ +public class FlatFileSplitter { + private static final Logger log = LoggerFactory.getLogger(FlatFileSplitter.class); + + private static final String SPLIT_DIR_NAME = "split"; + private static final long MINIMUM_SPLIT_THRESHOLD = 10 * FileUtils.ONE_MB; + + private final File workDir; + private final NodeTypeInfoProvider infoProvider; + private final File flatFile; + private final NodeStateEntryReader entryReader; + private final Compression.Algorithm algorithm; + private Set<IndexDefinition> indexDefinitions; + private Set<String> splitNodeTypeNames; + private long minimumSplitThreshold = MINIMUM_SPLIT_THRESHOLD; + private int splitSize = Integer.getInteger(PROP_SPLIT_STORE_SIZE, DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE); + private boolean useCompression = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_ZIP, "true")); + private boolean useLZ4 = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_LZ4, "false")); Review Comment: they can actually be all static ```suggestion private final int splitSize = Integer.getInteger(PROP_SPLIT_STORE_SIZE, DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE); private final boolean useCompression = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_ZIP, "true")); private final boolean useLZ4 = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_LZ4, "false")); ``` ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitter.java: ########## @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.Stack; +import java.util.stream.Collectors; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE; 
+import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_LZ4; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_ZIP; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.PROP_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName; + +/** + * This class is being used when {@link FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX} is set to true. + * It will split a flat file safely by checking the index definitions. An entry is considered safe to split if only + * none of the parent directories contains nodes in indexRule and aggregate fields of the provided index definitions. 
+ */ +public class FlatFileSplitter { + private static final Logger log = LoggerFactory.getLogger(FlatFileSplitter.class); + + private static final String SPLIT_DIR_NAME = "split"; + private static final long MINIMUM_SPLIT_THRESHOLD = 10 * FileUtils.ONE_MB; + + private final File workDir; + private final NodeTypeInfoProvider infoProvider; + private final File flatFile; + private final NodeStateEntryReader entryReader; + private final Compression.Algorithm algorithm; + private Set<IndexDefinition> indexDefinitions; + private Set<String> splitNodeTypeNames; + private long minimumSplitThreshold = MINIMUM_SPLIT_THRESHOLD; + private int splitSize = Integer.getInteger(PROP_SPLIT_STORE_SIZE, DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE); + private boolean useCompression = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_ZIP, "true")); + private boolean useLZ4 = Boolean.parseBoolean(System.getProperty(OAK_INDEXER_USE_LZ4, "false")); + + public FlatFileSplitter(File flatFile, File workdir, NodeTypeInfoProvider infoProvider, NodeStateEntryReader entryReader, + Set<IndexDefinition> indexDefinitions) { + this.flatFile = flatFile; + this.indexDefinitions = indexDefinitions; + this.workDir = new File(workdir, SPLIT_DIR_NAME); + + this.infoProvider = infoProvider; + this.entryReader = entryReader; + + Compression.Algorithm algorithm = Compression.Algorithm.GZIP; + if (!useCompression) { + algorithm = Compression.Algorithm.NONE; + } else if (useLZ4) { + algorithm = Compression.Algorithm.LZ4; + } + this.algorithm = algorithm; + } + + private List<File> returnOriginalFlatFile() { + return Collections.singletonList(flatFile); + } + + public List<File> split() throws IOException { + return split(true); + } + + public List<File> split(boolean deleteOriginal) throws IOException { + List<File> splitFlatFiles = new ArrayList<>(); + try { + FileUtils.forceMkdir(workDir); + } catch (IOException e) { + log.error("failed to create split directory {}", workDir.getAbsolutePath()); + return 
returnOriginalFlatFile(); + } + + long fileSizeInBytes = flatFile.length(); + long splitThreshold = Math.round((double) (fileSizeInBytes / splitSize)); + log.info("original flat file size: ~{}", FileUtils.byteCountToDisplaySize(fileSizeInBytes)); + log.info("split threshold is ~{} bytes, estimate split size >={} files", FileUtils.byteCountToDisplaySize(splitThreshold), splitSize); Review Comment: ```suggestion log.info("original flat file size: ~{}", FileUtils.byteCountToDisplaySize(fileSizeInBytes)); log.info("split threshold is ~{} bytes, estimate split size >={} files", FileUtils.byteCountToDisplaySize(splitThreshold), splitSize); ``` ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitter.java: ########## @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.Stack; +import java.util.stream.Collectors; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_LZ4; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_ZIP; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.PROP_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName; + +/** + * This class 
is being used when {@link FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX} is set to true. + * It will split a flat file safely by checking the index definitions. An entry is considered safe to split if only + * none of the parent directories contains nodes in indexRule and aggregate fields of the provided index definitions. + */ +public class FlatFileSplitter { + private static final Logger log = LoggerFactory.getLogger(FlatFileSplitter.class); + + private static final String SPLIT_DIR_NAME = "split"; + private static final long MINIMUM_SPLIT_THRESHOLD = 10 * FileUtils.ONE_MB; + + private final File workDir; + private final NodeTypeInfoProvider infoProvider; + private final File flatFile; + private final NodeStateEntryReader entryReader; + private final Compression.Algorithm algorithm; + private Set<IndexDefinition> indexDefinitions; + private Set<String> splitNodeTypeNames; + private long minimumSplitThreshold = MINIMUM_SPLIT_THRESHOLD; Review Comment: this is not set anywhere else. Should we remove it and use `MINIMUM_SPLIT_THRESHOLD` where needed? ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitter.java: ########## @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.Stack; +import java.util.stream.Collectors; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.DEFAULT_NUMBER_OF_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_LZ4; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_USE_ZIP; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.PROP_SPLIT_STORE_SIZE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static 
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName; + +/** + * This class is being used when {@link FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX} is set to true. + * It will split a flat file safely by checking the index definitions. An entry is considered safe to split if only + * none of the parent directories contains nodes in indexRule and aggregate fields of the provided index definitions. + */ +public class FlatFileSplitter { + private static final Logger log = LoggerFactory.getLogger(FlatFileSplitter.class); + + private static final String SPLIT_DIR_NAME = "split"; + private static final long MINIMUM_SPLIT_THRESHOLD = 10 * FileUtils.ONE_MB; + + private final File workDir; + private final NodeTypeInfoProvider infoProvider; + private final File flatFile; + private final NodeStateEntryReader entryReader; + private final Compression.Algorithm algorithm; + private Set<IndexDefinition> indexDefinitions; Review Comment: ```suggestion private final Set<IndexDefinition> indexDefinitions; ``` ########## oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitterTest.java: ########## @@ -0,0 +1,446 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.jackrabbit.oak.InitialContent; +import org.apache.jackrabbit.oak.OakInitializer; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.plugins.index.search.util.IndexDefinitionBuilder; +import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore; +import org.apache.jackrabbit.oak.plugins.name.NamespaceEditorProvider; +import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; +import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider; +import org.apache.jackrabbit.oak.spi.commit.EditorHook; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Scanner; +import java.util.Set; +import java.util.stream.Stream; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static 
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class FlatFileSplitterTest { + private ClassLoader classLoader = getClass().getClassLoader(); + private MemoryBlobStore store = new MemoryBlobStore(); + private NodeStateEntryReader entryReader = new NodeStateEntryReader(store); + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + + @Test + public void ntBaseSkipSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add(NT_BASE); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void belowThresholdSkipSplit() throws IOException, IllegalAccessException { + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, Integer.MAX_VALUE, Integer.MAX_VALUE, false, null); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void unknownTypeNoSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("unknown-no-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + 
assertEquals(flatFile.length(), flatFileList.get(0).length()); + } + + @Test + public void deleteAfterSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + File copied = new File(temporaryFolder.newFile().getAbsolutePath()); + FileUtils.copyFile(flatFile, copied); + FlatFileSplitter splitter = createTestSplitter(copied, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + long originalSize = flatFile.length(); + List<File> flatFileList = splitter.split(); + + assertEquals(originalSize, getTotalSize(flatFileList)); + assertTrue(flatFile.exists()); + assertTrue(!copied.exists()); + } + + @Test + public void simpleSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + File workDir = temporaryFolder.newFolder(); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(3, flatFileList.size()); + assertEquals(1, countLines(flatFileList.get(0))); + assertEquals(1, countLines(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void simpleSplitWithParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split"); + File flatFile = new File(classLoader.getResource("simple-split-with-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(2, flatFileList.size()); + assertEquals(4, countLines(flatFileList.get(0))); + assertEquals("no-split", 
startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split", startLineType(flatFileList.get(1))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void simpleSplitWithNestedParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split"); + File flatFile = new File(classLoader.getResource("simple-split-with-nested-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split", startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split", startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals("split", startLineType(flatFileList.get(2))); + assertEquals(2, countLines(flatFileList.get(3))); + assertEquals("no-split", startLineType(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void multipleNodeTypeSplitWithParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split-1"); + splitNodeTypeNames.add("no-split-2"); + File flatFile = new File(classLoader.getResource("multiple-node-type-simple-split-with-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split-1", startLineType(flatFileList.get(0))); + assertEquals(2, countLines(flatFileList.get(1))); + assertEquals("no-split-2", 
startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals(1, countLines(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void multipleNodeTypeSplitWithNestedParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split-1"); + splitNodeTypeNames.add("no-split-2"); + splitNodeTypeNames.add("no-split-3"); + splitNodeTypeNames.add("no-split-4"); + File flatFile = new File(classLoader.getResource("multiple-node-type-simple-split-with-nested-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split-1", startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split-2", startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals("split", startLineType(flatFileList.get(2))); + assertEquals(2, countLines(flatFileList.get(3))); + assertEquals("no-split-4", startLineType(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void splitAsset() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + String assetNodeType = "dam:Asset"; + splitNodeTypeNames.add(assetNodeType); + File flatFile = new File(classLoader.getResource("complex-split.json").getFile()); + int expectedSplitSize = 5; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, false, splitNodeTypeNames); + + + List<File> flatFileList = splitter.split(false); + + assertEquals(expectedSplitSize, flatFileList.size()); + assertEquals(flatFile.length(), 
getTotalSize(flatFileList)); + assertEquals(startLine(flatFile), startLine(flatFileList.get(0))); + for (int i = 1; i < flatFileList.size(); i++) { + assertEquals(assetNodeType, startLineType(flatFileList.get(i))); + } + } + + @Test + public void splitFolder() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(Arrays.asList( + "nt:file", + "cq:VirtualComponent", + "nt:folder", + "cq:PollConfigFolder", + "cq:ExporterConfigFolder", + "cq:ClientLibraryFolder", + "cq:ComponentMixin", + "cq:ContentSyncConfig", + "cq:Component", + "sling:OrderedFolder", + "sling:Folder", + "granite:Component")); + File flatFile = new File(classLoader.getResource("complex-split.json").getFile()); + int expectedSplitSize = 2; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertTrue(expectedSplitSize <= flatFileList.size()); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + assertEquals(startLineType(flatFile), startLineType(flatFileList.get(0))); + String expectedSplitPoint = "/etc|{\"jcr:primaryType\":\"nam:sling:Folder\"}"; + assertEquals(expectedSplitPoint, startLine(flatFileList.get(1))); + } + + @Test + public void splitFolderWithCompression() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(Arrays.asList( + "nt:file", + "cq:VirtualComponent", + "nt:folder", + "cq:PollConfigFolder", + "cq:ExporterConfigFolder", + "cq:ClientLibraryFolder", + "cq:ComponentMixin", + "cq:ContentSyncConfig", + "cq:Component", + "sling:OrderedFolder", + "sling:Folder", + "granite:Component")); + File rawFlatFile = new File(classLoader.getResource("complex-split.json").getFile());; + File flatFile = temporaryFolder.newFile(); + compress(rawFlatFile, flatFile); + int expectedSplitSize = 3; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, true, 
splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + List<File> rawFlatFileList = new ArrayList<>(); + + for (File f: flatFileList) { + File uf = temporaryFolder.newFile(); + uncompress(f, uf); + rawFlatFileList.add(uf); + } + + assertTrue(expectedSplitSize <= flatFileList.size()); + assertEquals(rawFlatFile.length(), getTotalSize(rawFlatFileList)); + } + + @Test + public void getSplitNodeTypeNames() throws IllegalAccessException { Review Comment: no exception is thrown here ```suggestion public void getSplitNodeTypeNames() { ``` ########## oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/ElasticConnection.java: ########## @@ -118,6 +119,9 @@ private Clients getClients() { Header[] headers = new Header[]{new BasicHeader("Authorization", "ApiKey " + apiKeyAuth)}; builder.setDefaultHeaders(headers); } + builder.setRequestConfigCallback( + requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ES_CLIENT_CONNECT_TIMEOUT).setSocketTimeout(ES_CLIENT_CONNECT_TIMEOUT)); Review Comment: the default connectTimeout is 1s, while the socketTimeout is 30s. I think we should increase only the latter. ########## oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitterTest.java: ########## @@ -0,0 +1,446 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.jackrabbit.oak.InitialContent; +import org.apache.jackrabbit.oak.OakInitializer; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.plugins.index.search.util.IndexDefinitionBuilder; +import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore; +import org.apache.jackrabbit.oak.plugins.name.NamespaceEditorProvider; +import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; +import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider; +import org.apache.jackrabbit.oak.spi.commit.EditorHook; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import 
java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Scanner; +import java.util.Set; +import java.util.stream.Stream; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class FlatFileSplitterTest { + private ClassLoader classLoader = getClass().getClassLoader(); + private MemoryBlobStore store = new MemoryBlobStore(); + private NodeStateEntryReader entryReader = new NodeStateEntryReader(store); + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + + @Test + public void ntBaseSkipSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add(NT_BASE); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void belowThresholdSkipSplit() throws IOException, IllegalAccessException { + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, Integer.MAX_VALUE, Integer.MAX_VALUE, false, null); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void unknownTypeNoSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile 
= new File(classLoader.getResource("unknown-no-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile.length(), flatFileList.get(0).length()); + } + + @Test + public void deleteAfterSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + File copied = new File(temporaryFolder.newFile().getAbsolutePath()); + FileUtils.copyFile(flatFile, copied); + FlatFileSplitter splitter = createTestSplitter(copied, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + long originalSize = flatFile.length(); + List<File> flatFileList = splitter.split(); + + assertEquals(originalSize, getTotalSize(flatFileList)); + assertTrue(flatFile.exists()); + assertTrue(!copied.exists()); Review Comment: ```suggestion assertFalse(copied.exists()); ``` ########## oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/FlatFileSplitterTest.java: ########## @@ -0,0 +1,446 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.jackrabbit.oak.index.indexer.document.flatfile; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.jackrabbit.oak.InitialContent; +import org.apache.jackrabbit.oak.OakInitializer; +import org.apache.jackrabbit.oak.api.PropertyState; +import org.apache.jackrabbit.oak.api.Type; +import org.apache.jackrabbit.oak.commons.Compression; +import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; +import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; +import org.apache.jackrabbit.oak.plugins.index.search.util.IndexDefinitionBuilder; +import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore; +import org.apache.jackrabbit.oak.plugins.name.NamespaceEditorProvider; +import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo; +import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider; +import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; +import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider; +import org.apache.jackrabbit.oak.spi.commit.EditorHook; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Scanner; +import java.util.Set; +import java.util.stream.Stream; + +import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; +import static org.apache.jackrabbit.JcrConstants.NT_BASE; +import static 
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader; +import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class FlatFileSplitterTest { + private ClassLoader classLoader = getClass().getClassLoader(); + private MemoryBlobStore store = new MemoryBlobStore(); + private NodeStateEntryReader entryReader = new NodeStateEntryReader(store); + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + + @Test + public void ntBaseSkipSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add(NT_BASE); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void belowThresholdSkipSplit() throws IOException, IllegalAccessException { + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, Integer.MAX_VALUE, Integer.MAX_VALUE, false, null); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + assertEquals(flatFile, flatFileList.get(0)); + } + + @Test + public void unknownTypeNoSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("unknown-no-split.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(1, flatFileList.size()); + 
assertEquals(flatFile.length(), flatFileList.get(0).length()); + } + + @Test + public void deleteAfterSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + File copied = new File(temporaryFolder.newFile().getAbsolutePath()); + FileUtils.copyFile(flatFile, copied); + FlatFileSplitter splitter = createTestSplitter(copied, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + long originalSize = flatFile.length(); + List<File> flatFileList = splitter.split(); + + assertEquals(originalSize, getTotalSize(flatFileList)); + assertTrue(flatFile.exists()); + assertTrue(!copied.exists()); + } + + @Test + public void simpleSplit() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + File flatFile = new File(classLoader.getResource("simple-split.json").getFile()); + File workDir = temporaryFolder.newFolder(); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(3, flatFileList.size()); + assertEquals(1, countLines(flatFileList.get(0))); + assertEquals(1, countLines(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void simpleSplitWithParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split"); + File flatFile = new File(classLoader.getResource("simple-split-with-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(2, flatFileList.size()); + assertEquals(4, countLines(flatFileList.get(0))); + assertEquals("no-split", 
startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split", startLineType(flatFileList.get(1))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void simpleSplitWithNestedParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split"); + File flatFile = new File(classLoader.getResource("simple-split-with-nested-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split", startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split", startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals("split", startLineType(flatFileList.get(2))); + assertEquals(2, countLines(flatFileList.get(3))); + assertEquals("no-split", startLineType(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void multipleNodeTypeSplitWithParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split-1"); + splitNodeTypeNames.add("no-split-2"); + File flatFile = new File(classLoader.getResource("multiple-node-type-simple-split-with-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split-1", startLineType(flatFileList.get(0))); + assertEquals(2, countLines(flatFileList.get(1))); + assertEquals("no-split-2", 
startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals(1, countLines(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void multipleNodeTypeSplitWithNestedParent() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + splitNodeTypeNames.add("no-split-1"); + splitNodeTypeNames.add("no-split-2"); + splitNodeTypeNames.add("no-split-3"); + splitNodeTypeNames.add("no-split-4"); + File flatFile = new File(classLoader.getResource("multiple-node-type-simple-split-with-nested-parent.json").getFile()); + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, Integer.MAX_VALUE, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertEquals(4, flatFileList.size()); + assertEquals(2, countLines(flatFileList.get(0))); + assertEquals("no-split-1", startLineType(flatFileList.get(0))); + assertEquals(4, countLines(flatFileList.get(1))); + assertEquals("no-split-2", startLineType(flatFileList.get(1))); + assertEquals(1, countLines(flatFileList.get(2))); + assertEquals("split", startLineType(flatFileList.get(2))); + assertEquals(2, countLines(flatFileList.get(3))); + assertEquals("no-split-4", startLineType(flatFileList.get(3))); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + } + + @Test + public void splitAsset() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(); + String assetNodeType = "dam:Asset"; + splitNodeTypeNames.add(assetNodeType); + File flatFile = new File(classLoader.getResource("complex-split.json").getFile()); + int expectedSplitSize = 5; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, false, splitNodeTypeNames); + + + List<File> flatFileList = splitter.split(false); + + assertEquals(expectedSplitSize, flatFileList.size()); + assertEquals(flatFile.length(), 
getTotalSize(flatFileList)); + assertEquals(startLine(flatFile), startLine(flatFileList.get(0))); + for (int i = 1; i < flatFileList.size(); i++) { + assertEquals(assetNodeType, startLineType(flatFileList.get(i))); + } + } + + @Test + public void splitFolder() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(Arrays.asList( + "nt:file", + "cq:VirtualComponent", + "nt:folder", + "cq:PollConfigFolder", + "cq:ExporterConfigFolder", + "cq:ClientLibraryFolder", + "cq:ComponentMixin", + "cq:ContentSyncConfig", + "cq:Component", + "sling:OrderedFolder", + "sling:Folder", + "granite:Component")); + File flatFile = new File(classLoader.getResource("complex-split.json").getFile()); + int expectedSplitSize = 2; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, false, splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + + assertTrue(expectedSplitSize <= flatFileList.size()); + assertEquals(flatFile.length(), getTotalSize(flatFileList)); + assertEquals(startLineType(flatFile), startLineType(flatFileList.get(0))); + String expectedSplitPoint = "/etc|{\"jcr:primaryType\":\"nam:sling:Folder\"}"; + assertEquals(expectedSplitPoint, startLine(flatFileList.get(1))); + } + + @Test + public void splitFolderWithCompression() throws IOException, IllegalAccessException { + Set<String> splitNodeTypeNames = new HashSet<>(Arrays.asList( + "nt:file", + "cq:VirtualComponent", + "nt:folder", + "cq:PollConfigFolder", + "cq:ExporterConfigFolder", + "cq:ClientLibraryFolder", + "cq:ComponentMixin", + "cq:ContentSyncConfig", + "cq:Component", + "sling:OrderedFolder", + "sling:Folder", + "granite:Component")); + File rawFlatFile = new File(classLoader.getResource("complex-split.json").getFile());; + File flatFile = temporaryFolder.newFile(); + compress(rawFlatFile, flatFile); + int expectedSplitSize = 3; + FlatFileSplitter splitter = createTestSplitter(flatFile, 0, expectedSplitSize, true, 
splitNodeTypeNames); + + List<File> flatFileList = splitter.split(false); + List<File> rawFlatFileList = new ArrayList<>(); + + for (File f: flatFileList) { + File uf = temporaryFolder.newFile(); + uncompress(f, uf); + rawFlatFileList.add(uf); + } + + assertTrue(expectedSplitSize <= flatFileList.size()); + assertEquals(rawFlatFile.length(), getTotalSize(rawFlatFileList)); + } + + @Test + public void getSplitNodeTypeNames() throws IllegalAccessException { + NodeStore store = new MemoryNodeStore(); + EditorHook hook = new EditorHook( + new CompositeEditorProvider(new NamespaceEditorProvider(), new TypeEditorProvider())); + OakInitializer.initialize(store, new InitialContent(), hook); + + Set<IndexDefinition> defns = new HashSet<>(); + + IndexDefinitionBuilder defnb1 = new IndexDefinitionBuilder(); + defnb1.indexRule("testIndexRule1"); + defnb1.aggregateRule("testAggregate1"); + IndexDefinition defn1 = IndexDefinition.newBuilder(store.getRoot(), defnb1.build(), "/foo").build(); + defns.add(defn1); + + IndexDefinitionBuilder defnb2 = new IndexDefinitionBuilder(); + defnb2.indexRule("testIndexRule2"); + defnb2.aggregateRule("testAggregate2"); + defnb2.aggregateRule("testAggregate3"); + IndexDefinition defn2 = IndexDefinition.newBuilder(store.getRoot(), defnb2.build(), "/bar").build(); + defns.add(defn2); + + List<String> resultNodeTypes = new ArrayList<>(); + NodeTypeInfoProvider mockNodeTypeInfoProvider = Mockito.mock(NodeTypeInfoProvider.class); + for (String nodeType: new ArrayList<String>(Arrays.asList( + "testIndexRule1", + "testIndexRule2", + "testAggregate1", + "testAggregate2", + "testAggregate3" + ))) { Review Comment: ```suggestion for (String nodeType: Arrays.asList( "testIndexRule1", "testIndexRule2", "testAggregate1", "testAggregate2", "testAggregate3" )) { ``` ########## oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/progress/IndexingProgressReporter.java: ########## @@ -229,10 +231,12 @@ public IndexUpdateState(String indexPath, boolean 
reindex, long estimatedCount) } public void indexUpdate() throws CommitFailedException { - updateCount++; - if (updateCount % 10000 == 0) { - log.info("{} => Indexed {} nodes in {} ...", indexPath, updateCount, watch); - watch.reset().start(); + updateCount.incrementAndGet(); + if (updateCount.get() % 10000 == 0) { Review Comment: +1 ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/DocumentStoreIndexerBase.java: ########## @@ -265,6 +271,53 @@ public void reindex() throws CommitFailedException, IOException { indexerSupport.postIndexWork(copyOnWriteStore); } + private void indexParallel(List<FlatFileStore> storeList, CompositeIndexer indexer, IndexingProgressReporter progressReporter) { + ExecutorService service = Executors.newFixedThreadPool(INDEX_THREAD_POOL_SIZE); + List<Future> futureList = new ArrayList<>(); + + for (FlatFileStore item : storeList) { + Future future = service.submit(new Callable<Boolean>() { + @Override + public Boolean call() throws IOException, CommitFailedException { + for (NodeStateEntry entry : item) { + reportDocumentRead(entry.getPath(), progressReporter); + indexer.index(entry); + } + return true; + } + }); + futureList.add(future); + } + + try { + for (Future future : futureList) { + future.get(); + } + log.info("All {} indexing jobs are done", storeList.size()); + service.shutdown(); Review Comment: +1 ########## oak-run/src/test/java/org/apache/jackrabbit/oak/index/DocumentStoreIndexerIT.java: ########## @@ -214,6 +214,16 @@ protected CompositeIndexer prepareIndexers(NodeStore nodeStore, NodeBuilder buil } + @Test + public void testParallelIndexing() throws Exception { + try { + System.setProperty(FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX, "true"); + bundling(); + } finally { + System.clearProperty(FlatFileNodeStoreBuilder.OAK_INDEXER_PARALLEL_INDEX); + } + } + Review Comment: you could use the utility introduced in https://github.com/apache/jackrabbit-oak/pull/595 to set/clean 
system properties ########## oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/index/ElasticBulkProcessorHandler.java: ########## @@ -151,6 +152,7 @@ private BulkProcessor initBulkProcessor() { return BulkProcessor.builder(requestConsumer(), new OakBulkProcessorListener(), this.indexName + "-bulk-processor") .setBulkActions(indexDefinition.bulkActions) + .setConcurrentRequests(BULK_PROCESSOR_CONCURRENCY) Review Comment: I agree. The concurrent number of requests should be configurable (1 by default). ########## oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/DocumentStoreIndexerBase.java: ########## @@ -265,6 +271,53 @@ public void reindex() throws CommitFailedException, IOException { indexerSupport.postIndexWork(copyOnWriteStore); } + private void indexParallel(List<FlatFileStore> storeList, CompositeIndexer indexer, IndexingProgressReporter progressReporter) { + ExecutorService service = Executors.newFixedThreadPool(INDEX_THREAD_POOL_SIZE); + List<Future> futureList = new ArrayList<>(); + + for (FlatFileStore item : storeList) { + Future future = service.submit(new Callable<Boolean>() { + @Override + public Boolean call() throws IOException, CommitFailedException { + for (NodeStateEntry entry : item) { + reportDocumentRead(entry.getPath(), progressReporter); + indexer.index(entry); + } + return true; + } + }); + futureList.add(future); + } + + try { + for (Future future : futureList) { + future.get(); + } + log.info("All {} indexing jobs are done", storeList.size()); + service.shutdown(); + } catch (InterruptedException | ExecutionException e) { + log.error("Failure getting indexing job result", e); Review Comment: if one or more indexers throw an Exception, we just log the error but the process continues. @thomasmueller do you think that's okay? 
########## oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/sort/ExternalSort.java: ########## @@ -17,6 +17,11 @@ package org.apache.jackrabbit.oak.commons.sort; // filename: ExternalSort.java + +import net.jpountz.lz4.LZ4FrameInputStream; +import net.jpountz.lz4.LZ4FrameOutputStream; Review Comment: Unused imports. Please also remove the imports from lines 43 to 45. ```suggestion ``` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: dev-unsubscribe@jackrabbit.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org