This is an automated email from the ASF dual-hosted git repository.
zhouxj pushed a commit to branch feature/GEODE-3244
in repository https://gitbox.apache.org/repos/asf/geode.git
The following commit(s) were added to refs/heads/feature/GEODE-3244 by this
push:
new 7535ff4 GEODE-3244: update the mappers based algorithm
7535ff4 is described below
commit 7535ff412df144c3f25424028f99f697f396089d
Author: zhouxh <[email protected]>
AuthorDate: Fri Oct 6 17:38:49 2017 -0700
GEODE-3244: update the mappers based algorithm
---
.../repository/serializer/FieldsWithPrefix.java | 7 +-
.../serializer/FlatFormatSerializer.java | 122 +++++++--------------
.../NestedObjectSeralizerIntegrationTest.java | 14 ++-
3 files changed, 58 insertions(+), 85 deletions(-)
diff --git
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FieldsWithPrefix.java
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FieldsWithPrefix.java
index cc40439..ccab403 100644
---
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FieldsWithPrefix.java
+++
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FieldsWithPrefix.java
@@ -16,17 +16,22 @@ package
org.apache.geode.cache.lucene.internal.repository.serializer;
import java.util.HashSet;
+import org.apache.geode.cache.lucene.LuceneSerializer;
+
public class FieldsWithPrefix {
+ LuceneSerializer serializer;
HashSet<String> topLevelFieldSet;
String prefix;
FieldsWithPrefix(HashSet<String> topLevelFieldSet, String prefix) {
+ // TODO initialize serializer
this.topLevelFieldSet = topLevelFieldSet;
this.prefix = prefix;
}
@Override
public String toString() {
- return "prefix is " + prefix + ", topLevelFieldSet is " + topLevelFieldSet;
+ return "prefix is " + prefix + ", topLevelFieldSet is " + topLevelFieldSet
+ ":serializer is "
+ + serializer;
}
}
diff --git
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FlatFormatSerializer.java
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FlatFormatSerializer.java
index d1eb02b..3f22b3f 100644
---
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FlatFormatSerializer.java
+++
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/repository/serializer/FlatFormatSerializer.java
@@ -24,48 +24,29 @@ import java.util.Map;
import org.apache.geode.cache.lucene.LuceneIndex;
import org.apache.geode.cache.lucene.LuceneSerializer;
+import org.apache.geode.cache.lucene.LuceneService;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.util.concurrent.CopyOnWriteWeakHashMap;
import org.apache.geode.pdx.PdxInstance;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexableField;
public class FlatFormatSerializer implements LuceneSerializer {
- /**
- * A mapper for converting a PDX object into a document
- */
- private LuceneSerializer pdxMapper;
-
- /**
- * Mappers for each individual class type that this class has seen.
- *
- * Weak so that entry will be removed if a class is garbage collected.
- */
- private Map<Class<?>, LuceneSerializer> mappers =
- new CopyOnWriteWeakHashMap<Class<?>, LuceneSerializer>();
/**
* keep class to defined fields mapping for each class For example, Customer
object based on
* indexed fields name, contact.name, contact.address,
contact.homepage.title will become
* <Customer.class, (<name, contact>, prefix=null)>; <Person.class, (<name,
address,
- * homepage.title>, prefix=contact)>, <Page.class, (<title>,
prefix=contact.homepage)>
+ * homepage.title>, prefix=contact)>, <Page.class, (<title>,
prefix=homepage)>
*/
- public HashMap<Class<?>, FieldsWithPrefix> classToFieldsMapping = new
HashMap();
+ public HashMap<Class<?>, FieldsWithPrefix> classToFieldsMapping = new
HashMap<>();
private static final Logger logger = LogService.getLogger();
- public FlatFormatSerializer() {
- final PrimitiveSerializer primitiveSerializer = new PrimitiveSerializer();
- SerializerUtil.supportedPrimitiveTypes().stream()
- .forEach(type -> mappers.put(type, primitiveSerializer));
-
- pdxMapper = new PdxLuceneSerializer();
- }
+ public FlatFormatSerializer() {}
@Override
public Collection<Document> toDocuments(LuceneIndex index, Object value) {
@@ -88,59 +69,38 @@ public class FlatFormatSerializer implements
LuceneSerializer {
*/
public LuceneSerializer getFieldMapper(Object value, String[] indexedFields)
{
if (value instanceof PdxInstance) {
- return FlatFormatSerializer.this.pdxMapper;
+ throw new IllegalStateException("FlatFormatSerializer does not support
PDX");
} else {
- Class<?> clazz = value.getClass();
- LuceneSerializer mapper = null;
- synchronized (mappers) {
- mapper = mappers.get(clazz);
- if (mapper == null) {
- // parse this nested object (i.e. customerObject) to build
class-fields map for each field
- // At this time, the parameters are (customerObject, <name,
contact.name, contact.address,
- // contact.homepage.title>)
- // create classToFields mapping for value's class
- // finally classToFieldsMapping will contain:
- // <Customer.class, <name, contact>>, <Person.class, <name, address,
homepage>>,
- // <Page.class, <title>>
- createClassToFieldsMapping(value, indexedFields, null);
-
- // search in current mappers for all the classes used in this nested
object
- for (Map.Entry<Class<?>, FieldsWithPrefix> entry :
classToFieldsMapping.entrySet()) {
- Class<?> aClassWithIndexedFields = entry.getKey();
- if (mappers.get(aClassWithIndexedFields) == null) {
- // if the class is still not in mappers, and it contains
non-nested fields, just
- // create
- // ReflectionLuceneSerializer
- HashSet<String> topLevelFieldSet =
entry.getValue().topLevelFieldSet;
- String prefix = entry.getValue().prefix;
- String[] fields = topLevelFieldSet.toArray(new
String[topLevelFieldSet.size()]);
-
- // parameters are (Customer.class, <name, contact>)
- mapper =
- new
FlatFormatReflectionLuceneSerializer(aClassWithIndexedFields, fields, prefix);
- mappers.put(aClassWithIndexedFields, mapper);
- }
- }
- return mappers.get(clazz);
- } else {
- return mapper;
- }
- }
+ return createClassToFieldsMapping(value, indexedFields, null);
}
}
/*
* create value's class to its indexed fields mapping
*/
- private void createClassToFieldsMapping(Object value, String[]
indexedFields, String prefix) {
+ private synchronized LuceneSerializer createClassToFieldsMapping(Object
value,
+ String[] indexedFields, String prefix) {
Class<?> clazz = value.getClass();
FieldsWithPrefix fieldsWithPrefix = classToFieldsMapping.get(clazz);
if (fieldsWithPrefix == null) {
+ if (SerializerUtil.supportedPrimitiveTypes().contains(clazz)
+ &&
Arrays.asList(indexedFields).contains(LuceneService.REGION_VALUE_FIELD)) {
+ final PrimitiveSerializer primitiveSerializer = new
PrimitiveSerializer();
+ final HashSet regionValueFields = new HashSet();
+ regionValueFields.add(LuceneService.REGION_VALUE_FIELD);
+ final FieldsWithPrefix regionValueFieldsWithPrefix =
+ new FieldsWithPrefix(regionValueFields, null);
+ regionValueFieldsWithPrefix.serializer = primitiveSerializer;
+ SerializerUtil.supportedPrimitiveTypes().stream()
+ .forEach(type -> classToFieldsMapping.put(type,
regionValueFieldsWithPrefix));
+ return primitiveSerializer;
+ }
+
HashSet<String> topLevelFieldSet = new HashSet<String>();
fieldsWithPrefix = new FieldsWithPrefix(topLevelFieldSet, prefix);
classToFieldsMapping.put(clazz, fieldsWithPrefix);
} else {
- return;
+ return fieldsWithPrefix.serializer;
}
if (logger.isDebugEnabled()) {
logger.debug("createClassToFieldsMapping:found an undefined class,
object value is " + value
@@ -175,41 +135,39 @@ public class FlatFormatSerializer implements
LuceneSerializer {
+ ":parentToChildFieldsMap is " + parentToChildFieldsMapping);
}
+ String[] fields = fieldsWithPrefix.topLevelFieldSet
+ .toArray(new String[fieldsWithPrefix.topLevelFieldSet.size()]);
+ fieldsWithPrefix.serializer = new
FlatFormatReflectionLuceneSerializer(clazz, fields, prefix);
+
// Now we've got a topLevelFieldSet, such as <name, contact> and a
parentToChildFieldsMapping
// such as <contact, <name, address, homepage.title>>
// for each parent field, i.e. contact, recursively process with its
childFieldSet, i.e. <name,
// address, homepage.title>
for (Map.Entry<String, HashSet<String>> entry :
parentToChildFieldsMapping.entrySet()) {
- try {
- String parentFieldName = entry.getKey();
- HashSet<String> childFieldSet = entry.getValue();
- String[] childFields = childFieldSet.toArray(new
String[childFieldSet.size()]);
+ String parentFieldName = entry.getKey();
+ HashSet<String> childFieldSet = entry.getValue();
+ String[] childFields = childFieldSet.toArray(new
String[childFieldSet.size()]);
- // parentFieldName is to get childValue
- Object childValue = getFieldValue(value, parentFieldName);
+ // parentFieldName is to get childValue
+ Object childValue = getFieldValue(value, parentFieldName);
- // now we've got a child object, find class-fields mapping for its
fields
- // this time, the parameters are (personObject, <name, address,
homepage.title>)
- createClassToFieldsMapping(childValue, childFields, parentFieldName);
- } catch (Exception e) {
- e.printStackTrace();
- }
+ // now we've got a child object, find class-fields mapping for its fields
+ // this time, the parameters are (personObject, <name, address,
homepage.title>)
+ createClassToFieldsMapping(childValue, childFields, parentFieldName);
}
+
+ return fieldsWithPrefix.serializer;
}
- private Object getFieldValue(Object parentValue, String parentFieldName)
- throws NoSuchFieldException, SecurityException {
- // now check if there's grandchild in nestedFields
+ private Object getFieldValue(Object parentValue, String parentFieldName) {
Class<?> parentClazz = parentValue.getClass();
- Field field = parentClazz.getDeclaredField(parentFieldName);
- Object childValue = null;
try {
+ Field field = parentClazz.getDeclaredField(parentFieldName);
field.setAccessible(true);
- childValue = field.get(parentValue);
+ return field.get(parentValue);
} catch (Exception e) {
- e.printStackTrace(System.out);
+ return null;
}
- return childValue;
}
class FlatFormatReflectionLuceneSerializer extends
ReflectionLuceneSerializer {
diff --git
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/NestedObjectSeralizerIntegrationTest.java
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/NestedObjectSeralizerIntegrationTest.java
index 10ad538..61abba4 100644
---
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/NestedObjectSeralizerIntegrationTest.java
+++
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/NestedObjectSeralizerIntegrationTest.java
@@ -43,7 +43,7 @@ public class NestedObjectSeralizerIntegrationTest extends
LuceneIntegrationTest
luceneService.createIndexFactory().setLuceneSerializer(new
FlatFormatSerializer())
.addField("name").addField("contact.name").addField("contact.email",
new KeywordAnalyzer())
.addField("contact.address").addField("contact.homepage.content")
- .create(INDEX_NAME, REGION_NAME);
+ .addField(LuceneService.REGION_VALUE_FIELD).create(INDEX_NAME,
REGION_NAME);
Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
@@ -52,6 +52,10 @@ public class NestedObjectSeralizerIntegrationTest extends
LuceneIntegrationTest
region.put("object-14", new Customer("Johnny Jackson"));
region.put("object-15", new Customer("Johnny Jackson2"));
region.put("object-16", new Customer("Johnny Jackson21"));
+ region.put("key-1", "region value 1");
+ region.put("key-2", "region value 2");
+ region.put("key-3", "region value 3");
+ region.put("key-4", "region value 4");
LuceneIndex index = luceneService.getIndex(INDEX_NAME, REGION_NAME);
luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME,
WAIT_FOR_FLUSH_TIME,
@@ -116,13 +120,19 @@ public class NestedObjectSeralizerIntegrationTest extends
LuceneIntegrationTest
results = query.findPages();
printResults(results);
assertEquals(0, results.size());
+
+ query = luceneService.createLuceneQueryFactory().create(INDEX_NAME,
REGION_NAME, "region",
+ LuceneService.REGION_VALUE_FIELD);
+ results = query.findPages();
+ printResults(results);
+ assertEquals(4, results.size());
}
@Test
public void indexOnNonExistFields() throws InterruptedException,
LuceneQueryException {
// define index on nested objects
luceneService.createIndexFactory().setLuceneSerializer(new
FlatFormatSerializer())
-
.addField("name").addField("contac").addField("contact.page").addField("contact.missing")
+
.addField("name").addField("contact").addField("contact.page").addField("contact.missing")
.addField("missing2").create(INDEX_NAME, REGION_NAME);
Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
--
To stop receiving notification emails like this one, please contact
"[email protected]" <[email protected]>.