diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowListTypeMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowListTypeMapper.java index 3924894832..c3f80379d7 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowListTypeMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowListTypeMapper.java @@ -27,6 +27,9 @@ import com.netflix.hollow.core.write.HollowWriteRecord; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalListNode; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; + import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.ArrayList; @@ -139,6 +142,18 @@ protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader rea return collection; } + @Override + protected Object parseFlatRecordTraversalNode(FlatRecordTraversalNode node) { + List collection = new ArrayList<>(); + + for (FlatRecordTraversalNode elementNode : (FlatRecordTraversalListNode) node) { + Object element = elementMapper.parseFlatRecordTraversalNode(elementNode); + collection.add(element); + } + + return collection; + } + @Override protected HollowWriteRecord newWriteRecord() { return new HollowListWriteRecord(); diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowMapTypeMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowMapTypeMapper.java index 2e7e3c1203..623a37b4de 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowMapTypeMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowMapTypeMapper.java @@ -28,6 +28,9 @@ import 
com.netflix.hollow.core.write.HollowWriteStateEngine; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalMapNode; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; + import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.HashMap; @@ -156,6 +159,19 @@ protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader rea return collection; } + @Override + protected Object parseFlatRecordTraversalNode(FlatRecordTraversalNode node) { + FlatRecordTraversalMapNode mapNode = (FlatRecordTraversalMapNode) node; + Map collection = new HashMap<>(); + + for (Map.Entry entry : mapNode.entrySet()) { + Object key = keyMapper.parseFlatRecordTraversalNode(entry.getKey()); + Object value = valueMapper.parseFlatRecordTraversalNode(entry.getValue()); + collection.put(key, value); + } + return collection; + } + @Override protected HollowWriteRecord newWriteRecord() { return new HollowMapWriteRecord(); diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapper.java index 7eed923dcc..a8e8f1ed6b 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapper.java @@ -22,6 +22,9 @@ import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; +import 
com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalObjectNode; + import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.HashMap; @@ -91,6 +94,16 @@ public void writeFlat(Object o, FlatRecordWriter flatRecordWriter) { typeMapper.writeFlat(o, flatRecordWriter); } + public T readFlat(FlatRecordTraversalNode node) { + String schemaName = node.getSchema().getName(); + HollowTypeMapper typeMapper = typeMappers.get(schemaName); + if (typeMapper == null) { + throw new IllegalArgumentException("No type mapper found for schema " + schemaName); + } + Object obj = typeMapper.parseFlatRecordTraversalNode(node); + return (T) obj; + } + public T readFlat(FlatRecord record) { FlatRecordReader recordReader = new FlatRecordReader(record); diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectTypeMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectTypeMapper.java index d59ab2401a..9856630630 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectTypeMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectTypeMapper.java @@ -39,6 +39,9 @@ import java.util.List; import java.util.Map; import java.util.Set; + +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalObjectNode; import sun.misc.Unsafe; @SuppressWarnings("restriction") @@ -290,6 +293,49 @@ protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader rea throw new RuntimeException(ex); } } + @Override + protected Object parseFlatRecordTraversalNode(FlatRecordTraversalNode node) { + try { + FlatRecordTraversalObjectNode objectNode = (FlatRecordTraversalObjectNode) node; + HollowObjectSchema recordObjectSchema = objectNode.getSchema(); + + Object obj = null; + if 
(BOXED_WRAPPERS.contains(clazz)) { + // if `clazz` is a BoxedWrapper then by definition its OBJECT schema will have a single primitive + // field so find it in the FlatRecord and ignore all other fields. + for (int i = 0; i < recordObjectSchema.numFields(); i++) { + int posInPojoSchema = schema.getPosition(recordObjectSchema.getFieldName(i)); + if (posInPojoSchema != -1) { + obj = mappedFields.get(posInPojoSchema).parseBoxedWrapper(objectNode); + } + } + } else if (clazz.isEnum()) { + // if `clazz` is an enum, then we should expect to find a field called `_name` in the FlatRecord. + // There may be other fields if the producer enum contained custom properties, we ignore them + // here assuming the enum constructor will set them if needed. + for (int i = 0; i < recordObjectSchema.numFields(); i++) { + String fieldName = recordObjectSchema.getFieldName(i); + int posInPojoSchema = schema.getPosition(fieldName); + if (fieldName.equals(MappedFieldType.ENUM_NAME.getSpecialFieldName()) && posInPojoSchema != -1) { + obj = mappedFields.get(posInPojoSchema).parseBoxedWrapper(objectNode); + } + } + } else { + obj = unsafe.allocateInstance(clazz); + for (int i = 0; i < recordObjectSchema.numFields(); i++) { + int posInPojoSchema = schema.getPosition(recordObjectSchema.getFieldName(i)); + if (posInPojoSchema != -1) { + mappedFields.get(posInPojoSchema).parse(obj, objectNode); + } + } + } + + return obj; + } + catch(Exception ex) { + throw new RuntimeException(ex); + } + } Object[] extractPrimaryKey(Object obj) { int[][] primaryKeyFieldPathIdx = this.primaryKeyFieldPathIdx; @@ -849,6 +895,220 @@ private Object parseBoxedWrapper(FlatRecordReader reader) { return null; } + private Object parseBoxedWrapper(FlatRecordTraversalObjectNode record) { + switch (fieldType) { + case BOOLEAN: + return record.getFieldValueBooleanBoxed(fieldName); + case INT: + return record.getFieldValueIntBoxed(fieldName); + case SHORT: + int shortValue = record.getFieldValueInt(fieldName); + if (shortValue 
== Integer.MIN_VALUE) { + return null; + } + return Short.valueOf((short) shortValue); + case BYTE: + int byteValue = record.getFieldValueInt(fieldName); + if (byteValue == Integer.MIN_VALUE) { + return null; + } + return Byte.valueOf((byte) byteValue); + case CHAR: + int charValue = record.getFieldValueInt(fieldName); + if (charValue == Integer.MIN_VALUE) { + return null; + } + return Character.valueOf((char) charValue); + case LONG: + return record.getFieldValueLongBoxed(fieldName); + case FLOAT: + return record.getFieldValueFloatBoxed(fieldName); + case DOUBLE: + return record.getFieldValueDoubleBoxed(fieldName); + case STRING: + return record.getFieldValueString(fieldName); + case BYTES: + return record.getFieldValueBytes(fieldName); + case ENUM_NAME: + String enumName = record.getFieldValueString(fieldName); + if (enumName == null) { + return null; + } + return Enum.valueOf((Class) clazz, enumName); + case DATE_TIME: { + long dateValue = record.getFieldValueLong(fieldName); + if (dateValue == Long.MIN_VALUE) { + return null; + } + return new Date(dateValue); + } + default: + throw new IllegalArgumentException("Unexpected field type " + fieldType + " for field " + fieldName); + } + } + + private void parse(Object obj, FlatRecordTraversalObjectNode node) { + switch(fieldType) { + case BOOLEAN: { + Boolean value = node.getFieldValueBooleanBoxed(fieldName); + if (value != null) { + unsafe.putBoolean(obj, fieldOffset, value == Boolean.TRUE); + } + break; + } + case INT: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putInt(obj, fieldOffset, value); + } + break; + } + case SHORT: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putShort(obj, fieldOffset, (short) value); + } + break; + } + case BYTE: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putByte(obj, fieldOffset, (byte) value); + } + break; + } + case CHAR: { + int 
value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putChar(obj, fieldOffset, (char) value); + } + break; + } + case LONG: { + long value = node.getFieldValueLong(fieldName); + if (value != Long.MIN_VALUE) { + unsafe.putLong(obj, fieldOffset, value); + } + break; + } + case FLOAT: { + float value = node.getFieldValueFloat(fieldName); + if (!Float.isNaN(value)) { + unsafe.putFloat(obj, fieldOffset, value); + } + break; + } + case DOUBLE: { + double value = node.getFieldValueDouble(fieldName); + if (!Double.isNaN(value)) { + unsafe.putDouble(obj, fieldOffset, value); + } + break; + } + case STRING: { + String value = node.getFieldValueString(fieldName); + if (value != null) { + unsafe.putObject(obj, fieldOffset, value); + } + break; + } + case BYTES: { + byte[] value = node.getFieldValueBytes(fieldName); + if (value != null) { + unsafe.putObject(obj, fieldOffset, value); + } + break; + } + case INLINED_BOOLEAN: { + Boolean value = node.getFieldValueBooleanBoxed(fieldName); + if (value != null) { + unsafe.putObject(obj, fieldOffset, value); + } + break; + } + case INLINED_INT: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putObject(obj, fieldOffset, Integer.valueOf(value)); + } + break; + } + case INLINED_SHORT: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putObject(obj, fieldOffset, Short.valueOf((short) value)); + } + break; + } + case INLINED_BYTE: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putObject(obj, fieldOffset, Byte.valueOf((byte) value)); + } + break; + } + case INLINED_CHAR: { + int value = node.getFieldValueInt(fieldName); + if (value != Integer.MIN_VALUE) { + unsafe.putObject(obj, fieldOffset, Character.valueOf((char) value)); + } + break; + } + case INLINED_LONG: { + long value = node.getFieldValueLong(fieldName); + if (value != Long.MIN_VALUE) { + unsafe.putObject(obj, 
fieldOffset, Long.valueOf(value)); + } + break; + } + case INLINED_FLOAT: { + float value = node.getFieldValueFloat(fieldName); + if (!Float.isNaN(value)) { + unsafe.putObject(obj, fieldOffset, Float.valueOf(value)); + } + break; + } + case INLINED_DOUBLE: { + double value = node.getFieldValueDouble(fieldName); + if (!Double.isNaN(value)) { + unsafe.putObject(obj, fieldOffset, Double.valueOf(value)); + } + break; + } + case INLINED_STRING: { + String value = node.getFieldValueString(fieldName); + if (value != null) { + unsafe.putObject(obj, fieldOffset, value); + } + break; + } + case DATE_TIME: { + long value = node.getFieldValueLong(fieldName); + if (value != Long.MIN_VALUE) { + unsafe.putObject(obj, fieldOffset, new Date(value)); + } + break; + } + case ENUM_NAME: { + String value = node.getFieldValueString(fieldName); + if (value != null) { + unsafe.putObject(obj, fieldOffset, Enum.valueOf((Class) type, value)); + } + break; + } + case REFERENCE: { + FlatRecordTraversalNode childNode = node.getFieldNode(fieldName); + if (childNode != null) { + unsafe.putObject(obj, fieldOffset, subTypeMapper.parseFlatRecordTraversalNode(childNode)); + } + break; + } + default: + throw new IllegalArgumentException("Unknown field type: " + fieldType); + } + } + + private void parse(Object obj, FlatRecordReader reader, Map parsedRecords) { switch(fieldType) { case BOOLEAN: { diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowSetTypeMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowSetTypeMapper.java index a6ad9694e3..37bb1fcffa 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowSetTypeMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowSetTypeMapper.java @@ -28,9 +28,13 @@ import com.netflix.hollow.core.write.HollowWriteStateEngine; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; import 
com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalSetNode; + import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.HashSet; +import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -132,6 +136,15 @@ protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader rea return collection; } + @Override + protected Object parseFlatRecordTraversalNode(FlatRecordTraversalNode node) { + Set collection = new HashSet<>(); + for (FlatRecordTraversalNode elementNode : (FlatRecordTraversalSetNode) node) { + collection.add(elementMapper.parseFlatRecordTraversalNode(elementNode)); + } + return collection; + } + @Override protected HollowWriteRecord newWriteRecord() { return new HollowSetWriteRecord(); diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeMapper.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeMapper.java index c6d8078995..6bf6d1561e 100644 --- a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeMapper.java +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeMapper.java @@ -24,6 +24,8 @@ import com.netflix.hollow.core.write.HollowWriteStateEngine; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalNode; + import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.List; @@ -47,6 +49,8 @@ public abstract class HollowTypeMapper { protected abstract Object parseHollowRecord(HollowRecord record); protected abstract Object 
parseFlatRecord(HollowSchema schema, FlatRecordReader reader, Map parsedObjects); + + protected abstract Object parseFlatRecordTraversalNode(FlatRecordTraversalNode node); protected abstract HollowWriteRecord newWriteRecord(); diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalListNode.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalListNode.java new file mode 100644 index 0000000000..aacb936c99 --- /dev/null +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalListNode.java @@ -0,0 +1,92 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + +import com.netflix.hollow.core.schema.HollowListSchema; +import com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.util.IntList; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; + +import java.util.AbstractList; +import java.util.Map; + +public class FlatRecordTraversalListNode extends AbstractList implements FlatRecordTraversalNode { + private FlatRecordReader reader; + private IntList ordinalPositions; + private HollowListSchema schema; + private int[] elementOrdinals; + private Map commonSchemaMap; + + @Override + public void reposition(FlatRecordReader reader, IntList ordinalPositions, int ordinal) { + this.reader = reader; + this.ordinalPositions = ordinalPositions; + + reader.resetTo(ordinalPositions.get(ordinal)); + schema = (HollowListSchema) reader.readSchema(); + + int size = reader.readCollectionSize(); + elementOrdinals = new int[size]; + for (int i = 0; i < size; i++) { + elementOrdinals[i] = reader.readOrdinal(); + } + } + + @Override + public void setCommonSchema(Map commonSchema) { + this.commonSchemaMap = commonSchema; + } + + @Override + public int hashCode() { + int hashCode = 1; + for (FlatRecordTraversalNode e : this) { + 
FlatRecordTraversalObjectNode objectNode = (FlatRecordTraversalObjectNode) e; + if (objectNode != null && commonSchemaMap.containsKey(objectNode.getSchema().getName())) { + objectNode.setCommonSchema(commonSchemaMap); + hashCode = 31 * hashCode + objectNode.hashCode(); + } + else if (objectNode == null) { + hashCode = 31 * hashCode; + } + } + return hashCode; + } + + + @Override + public HollowListSchema getSchema() { + return schema; + } + + public FlatRecordTraversalObjectNode getObject(int index) { + return (FlatRecordTraversalObjectNode) get(index); + } + + public FlatRecordTraversalListNode getList(int index) { + return (FlatRecordTraversalListNode) get(index); + } + + public FlatRecordTraversalSetNode getSet(int index) { + return (FlatRecordTraversalSetNode) get(index); + } + + public FlatRecordTraversalMapNode getMap(int index) { + return (FlatRecordTraversalMapNode) get(index); + } + + @Override + public FlatRecordTraversalNode get(int index) { + if (index >= elementOrdinals.length) { + throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + elementOrdinals.length); + } + int elementOrdinal = elementOrdinals[index]; + if (elementOrdinal == -1) { + return null; + } + return createAndRepositionNode(reader, ordinalPositions, elementOrdinal); + } + + @Override + public int size() { + return elementOrdinals.length; + } +} diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalMapNode.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalMapNode.java new file mode 100644 index 0000000000..e35b019cba --- /dev/null +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalMapNode.java @@ -0,0 +1,162 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + +import com.netflix.hollow.core.schema.HollowMapSchema; +import 
com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.util.IntList; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; + +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +public class FlatRecordTraversalMapNode extends AbstractMap implements FlatRecordTraversalNode { + private FlatRecordReader reader; + private IntList ordinalPositions; + private HollowMapSchema schema; + private int[] keyOrdinals; + private int[] valueOrdinals; + private Map commonSchemaMap; + + @Override + public void reposition(FlatRecordReader reader, IntList ordinalPositions, int ordinal) { + this.reader = reader; + this.ordinalPositions = ordinalPositions; + + reader.resetTo(ordinalPositions.get(ordinal)); + schema = (HollowMapSchema) reader.readSchema(); + + int size = reader.readCollectionSize(); + keyOrdinals = new int[size]; + valueOrdinals = new int[size]; + int keyOrdinal = 0; + for (int i = 0; i < size; i++) { + keyOrdinal += reader.readOrdinal(); + keyOrdinals[i] = keyOrdinal; + valueOrdinals[i] = reader.readOrdinal(); + } + } + + @Override + public void setCommonSchema(Map commonSchema) { + this.commonSchemaMap = commonSchema; + } + + @Override + public int hashCode() { + int h = 0; + Iterator> i = entrySet().iterator(); + while (i.hasNext()) { + Entry e = i.next(); + FlatRecordTraversalNode key = e.getKey(); + FlatRecordTraversalNode value = e.getValue(); + if(commonSchemaMap.containsKey(key.getSchema().getName())) { + key.setCommonSchema(commonSchemaMap); + h += (key == null ? 0 : key.hashCode()); + } + if(commonSchemaMap.containsKey(value.getSchema().getName())) { + value.setCommonSchema(commonSchemaMap); + h += (value == null ? 
0 : value.hashCode()); + } + } + return h; + } + + @Override + public HollowMapSchema getSchema() { + return schema; + } + + @Override + public Set> entrySet() { + return new AbstractSet>() { + @Override + public Iterator> iterator() { + return new EntrySetIteratorImpl<>(); + } + + @Override + public int size() { + return keyOrdinals.length; + } + }; + } + + public Iterator> entrySetIterator() { + return new EntrySetIteratorImpl<>(); + } + + private class EntrySetIteratorImpl implements Iterator> { + private int index = 0; + + @Override + public boolean hasNext() { + return index < keyOrdinals.length; + } + + @Override + public Entry next() { + if (index >= keyOrdinals.length) { + throw new IllegalStateException("No more elements"); + } + + int keyOrdinal = keyOrdinals[index]; + int valueOrdinal = valueOrdinals[index]; + index++; + + return new Entry() { + @Override + public K getKey() { + if (keyOrdinal == -1) { + return null; + } + return (K) createAndRepositionNode(reader, ordinalPositions, keyOrdinal); + } + + @Override + public V getValue() { + if (valueOrdinal == -1) { + return null; + } + return (V) createAndRepositionNode(reader, ordinalPositions, valueOrdinal); + } + + @Override + public V setValue(V value) { + throw new UnsupportedOperationException(); + } + }; + } + } + private static class MapEntry { + private final int key; + private final int value; + + public MapEntry(int key, int value) { + this.key = key; + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) return true; + if (!(o instanceof MapEntry)) return false; + MapEntry other = (MapEntry) o; + return key == other.key && value == other.value; + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return "MapEntry(" + key + ", " + value + ")"; + } + } +} diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalNode.java 
b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalNode.java new file mode 100644 index 0000000000..9a4dd05b71 --- /dev/null +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalNode.java @@ -0,0 +1,45 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + +import com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.schema.HollowSchema; +import com.netflix.hollow.core.util.IntList; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; + +import java.util.Map; + +/** + * An abstraction that allows for the traversal of a flat record from the root type to a specific sub-path. + */ +public interface FlatRecordTraversalNode { + HollowSchema getSchema(); + + void setCommonSchema(Map commonSchema); + + void reposition(FlatRecordReader reader, IntList ordinalPositions, int ordinal); + + default FlatRecordTraversalNode createAndRepositionNode(FlatRecordReader reader, IntList ordinalPositions, int ordinal) { + reader.pointer = ordinalPositions.get(ordinal); + HollowSchema schema = reader.readSchema(); + + FlatRecordTraversalNode node; + switch (schema.getSchemaType()) { + case OBJECT: + node = new FlatRecordTraversalObjectNode(); + break; + case LIST: + node = new FlatRecordTraversalListNode(); + break; + case SET: + node = new FlatRecordTraversalSetNode(); + break; + case MAP: + node = new FlatRecordTraversalMapNode(); + break; + default: + throw new IllegalArgumentException("Unsupported schema type: " + schema.getSchemaType()); + } + + node.reposition(reader, ordinalPositions, ordinal); + return node; + } +} \ No newline at end of file diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNode.java 
b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNode.java new file mode 100644 index 0000000000..b022c08b8d --- /dev/null +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNode.java @@ -0,0 +1,279 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + +import com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.schema.HollowSchema; +import com.netflix.hollow.core.util.IntList; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; + +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +public class FlatRecordTraversalObjectNode implements FlatRecordTraversalNode { + private FlatRecordReader reader; + private IntList ordinalPositions; + private HollowObjectSchema schema; + private Map commonSchemaMap; + private int position; + + public FlatRecordTraversalObjectNode() {} + + public FlatRecordTraversalObjectNode(FlatRecord rec) { + FlatRecordReader reader = new FlatRecordReader(rec); + + IntList ordinalPositions = new IntList(); + while (reader.hasMore()) { + ordinalPositions.add(reader.pointer); + HollowSchema schema = reader.readSchema(); + reader.skipSchema(schema); + } + + reposition(reader, ordinalPositions, ordinalPositions.size() - 1); + } + + @Override + public void reposition(FlatRecordReader reader, IntList ordinalPositions, int ordinal) { + this.reader = reader; + this.ordinalPositions = ordinalPositions; + + reader.resetTo(ordinalPositions.get(ordinal)); + schema = (HollowObjectSchema) reader.readSchema(); + position = reader.pointer; + } + + @Override + public HollowObjectSchema getSchema() { + return schema; + } + + @Override + public void setCommonSchema(Map commonSchema) { + this.commonSchemaMap = commonSchema; + } + + public 
FlatRecordTraversalObjectNode getObjectFieldNode(String field) { + return (FlatRecordTraversalObjectNode) getFieldNode(field); + } + + public FlatRecordTraversalListNode getListFieldNode(String field) { + return (FlatRecordTraversalListNode) getFieldNode(field); + } + + public FlatRecordTraversalSetNode getSetFieldNode(String field) { + return (FlatRecordTraversalSetNode) getFieldNode(field); + } + + public FlatRecordTraversalMapNode getMapFieldNode(String field) { + return (FlatRecordTraversalMapNode) getFieldNode(field); + } + + public FlatRecordTraversalNode getFieldNode(String field) { + if (!skipToField(field)) { + return null; + } + + if (schema.getFieldType(field) != HollowObjectSchema.FieldType.REFERENCE) { + throw new IllegalStateException("Cannot get child for non-reference field"); + } + + int refOrdinal = reader.readOrdinal(); + if (refOrdinal == -1) { + return null; + } + + return createAndRepositionNode(reader, ordinalPositions, refOrdinal); + } + + public Object getFieldValue(String field) { + if (!skipToField(field)) { + return null; + } + switch(schema.getFieldType(field)) { + case BOOLEAN: + return reader.readBoolean(); + case INT: + return reader.readInt(); + case LONG: + return reader.readLong(); + case FLOAT: + return reader.readFloat(); + case DOUBLE: + return reader.readDouble(); + case STRING: + return reader.readString(); + case BYTES: + return reader.readBytes(); + case REFERENCE: + throw new IllegalStateException("Cannot get leaf value for reference field"); + } + return null; + } + + public boolean getFieldValueBoolean(String field) { + if (!skipToField(field)) { + return false; + } + assertFieldType(field, HollowObjectSchema.FieldType.BOOLEAN); + return reader.readBoolean(); + } + + public Boolean getFieldValueBooleanBoxed(String field) { + return getFieldValueBoolean(field); + } + + public int getFieldValueInt(String field) { + if (!skipToField(field)) { + return Integer.MIN_VALUE; + } + assertFieldType(field, 
HollowObjectSchema.FieldType.INT); + return reader.readInt(); + } + + public Integer getFieldValueIntBoxed(String field) { + int value = getFieldValueInt(field); + if (value == Integer.MIN_VALUE) { + return null; + } + return value; + } + + public long getFieldValueLong(String field) { + if (!skipToField(field)) { + return Long.MIN_VALUE; + } + assertFieldType(field, HollowObjectSchema.FieldType.LONG); + return reader.readLong(); + } + + public Long getFieldValueLongBoxed(String field) { + long value = getFieldValueLong(field); + if (value == Long.MIN_VALUE) { + return null; + } + return value; + } + + public float getFieldValueFloat(String field) { + if (!skipToField(field)) { + return Float.NaN; + } + assertFieldType(field, HollowObjectSchema.FieldType.FLOAT); + return reader.readFloat(); + } + + public Float getFieldValueFloatBoxed(String field) { + float value = getFieldValueFloat(field); + if (Float.isNaN(value)) { + return null; + } + return value; + } + + public double getFieldValueDouble(String field) { + if (!skipToField(field)) { + return Double.NaN; + } + assertFieldType(field, HollowObjectSchema.FieldType.DOUBLE); + return reader.readDouble(); + } + + public Double getFieldValueDoubleBoxed(String field) { + double value = getFieldValueDouble(field); + if (Double.isNaN(value)) { + return null; + } + return value; + } + + public String getFieldValueString(String field) { + if (!skipToField(field)) { + return null; + } + assertFieldType(field, HollowObjectSchema.FieldType.STRING); + return reader.readString(); + } + + public byte[] getFieldValueBytes(String field) { + if (!skipToField(field)) { + return null; + } + assertFieldType(field, HollowObjectSchema.FieldType.BYTES); + return reader.readBytes(); + } + + @Override + public int hashCode() { + HollowObjectSchema commonSchema = commonSchemaMap.get(schema.getName()); + Object[] fields = new Object[commonSchema.numFields()]; + for(int i=0;i commonSchemaCache = new HashMap<>(); + + public static boolean 
equals(FlatRecordTraversalObjectNode left, FlatRecordTraversalObjectNode right) { + if (left == null && right == null) { + return true; + } + if (left == null || right == null) { + return false; + } + if (!left.getSchema().getName().equals(right.getSchema().getName())) { + return false; + } + extractCommonObjectSchema(left, right); + + return compare(left, right); + } + + private static boolean compare(FlatRecordTraversalNode left, FlatRecordTraversalNode right) { + if(left == null && right == null) { + return true; + } + if(left == null || right == null) { + return false; + } + if(!left.getSchema().getName().equals(right.getSchema().getName())) { + return false; + } + left.setCommonSchema(commonSchemaCache); + right.setCommonSchema(commonSchemaCache); + if(left instanceof FlatRecordTraversalObjectNode && right instanceof FlatRecordTraversalObjectNode) { + FlatRecordTraversalObjectNode leftObjectNode = (FlatRecordTraversalObjectNode) left; + FlatRecordTraversalObjectNode rightObjectNode = (FlatRecordTraversalObjectNode) right; + if(leftObjectNode.hashCode() != rightObjectNode.hashCode()) { + return false; + } + for(int i=0;i leftIterator = leftSetNode.iterator(); + Iterator rightIterator = rightSetNode.iterator(); + if (leftIterator.hasNext() && rightIterator.hasNext()) { + FlatRecordTraversalNode leftChildNode = leftIterator.next(); + FlatRecordTraversalNode rightChildNode = rightIterator.next(); + extractCommonObjectSchema(leftChildNode, rightChildNode); + } + } + else if (left instanceof FlatRecordTraversalListNode && right instanceof FlatRecordTraversalListNode) { + FlatRecordTraversalListNode leftListNode = (FlatRecordTraversalListNode) left; + FlatRecordTraversalListNode rightListNode = (FlatRecordTraversalListNode) right; + Iterator leftIterator = leftListNode.iterator(); + Iterator rightIterator = rightListNode.iterator(); + if (leftIterator.hasNext() && rightIterator.hasNext()) { + FlatRecordTraversalNode leftChildNode = leftIterator.next(); + 
FlatRecordTraversalNode rightChildNode = rightIterator.next(); + extractCommonObjectSchema(leftChildNode, rightChildNode); + } + } + else if (left instanceof FlatRecordTraversalMapNode && right instanceof FlatRecordTraversalMapNode) { + FlatRecordTraversalMapNode leftMapNode = (FlatRecordTraversalMapNode) left; + FlatRecordTraversalMapNode rightMapNode = (FlatRecordTraversalMapNode) right; + Iterator> leftIterator = leftMapNode.entrySet().iterator(); + Iterator> rightIterator = rightMapNode.entrySet().iterator(); + if (leftIterator.hasNext() && rightIterator.hasNext()) { + Map.Entry leftEntry = leftIterator.next(); + Map.Entry rightEntry = rightIterator.next(); + extractCommonObjectSchema(leftEntry.getKey(), rightEntry.getKey()); + extractCommonObjectSchema(leftEntry.getValue(), rightEntry.getValue()); + } + } + } + +} diff --git a/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalSetNode.java b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalSetNode.java new file mode 100644 index 0000000000..1cd7286361 --- /dev/null +++ b/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalSetNode.java @@ -0,0 +1,106 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + +import com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.schema.HollowSetSchema; +import com.netflix.hollow.core.util.IntList; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader; + +import java.util.AbstractSet; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class FlatRecordTraversalSetNode extends AbstractSet implements FlatRecordTraversalNode { + private FlatRecordReader reader; + private 
IntList ordinalPositions; + private HollowSetSchema schema; + private int[] elementOrdinals; + + private Map commonSchemaMap; + @Override + public void reposition(FlatRecordReader reader, IntList ordinalPositions, int ordinal) { + this.reader = reader; + this.ordinalPositions = ordinalPositions; + + reader.resetTo(ordinalPositions.get(ordinal)); + schema = (HollowSetSchema) reader.readSchema(); + + int size = reader.readCollectionSize(); + elementOrdinals = new int[size]; + int elementOrdinal = 0; + for (int i = 0; i < size; i++) { + elementOrdinal += reader.readOrdinal(); + elementOrdinals[i] = elementOrdinal; + } + } + + @Override + public void setCommonSchema(Map commonSchema) { + this.commonSchemaMap = commonSchema; + } + @Override + public HollowSetSchema getSchema() { + return schema; + } + + @Override + public int hashCode() { + int h = 0; + Iterator i = iterator(); + while (i.hasNext()) { + FlatRecordTraversalNode obj = i.next(); + if (obj != null && commonSchemaMap.containsKey(obj.getSchema().getName())) + obj.setCommonSchema(commonSchemaMap); + h += obj.hashCode(); + } + return h; + } + public Iterator objects() { + return new IteratorImpl<>(); + } + + public Iterator lists() { + return new IteratorImpl<>(); + } + + public Iterator sets() { + return new IteratorImpl<>(); + } + + public Iterator maps() { + return new IteratorImpl<>(); + } + + @Override + public Iterator iterator() { + return new IteratorImpl<>(); + } + + @Override + public int size() { + return elementOrdinals.length; + } + + private class IteratorImpl implements Iterator { + private int index = 0; + + @Override + public boolean hasNext() { + return index < elementOrdinals.length; + } + + @Override + public T next() { + int elementOrdinal = elementOrdinals[index++]; + if (elementOrdinal == -1) { + return null; + } + return (T) createAndRepositionNode(reader, ordinalPositions, elementOrdinal); + } + } +} diff --git 
a/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapperFlatRecordTraversalNodeParserTest.java b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapperFlatRecordTraversalNodeParserTest.java new file mode 100644 index 0000000000..caa4f8e99f --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapperFlatRecordTraversalNodeParserTest.java @@ -0,0 +1,722 @@ +package com.netflix.hollow.core.write.objectmapper; + +import com.netflix.hollow.core.write.HollowWriteStateEngine; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FakeHollowSchemaIdentifierMapper; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import com.netflix.hollow.core.write.objectmapper.flatrecords.traversal.FlatRecordTraversalObjectNode; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Base64; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +public class HollowObjectMapperFlatRecordTraversalNodeParserTest { + private HollowObjectMapper mapper; + private FlatRecordWriter flatRecordWriter; + + @Before + public void setUp() { + mapper = new HollowObjectMapper(new HollowWriteStateEngine()); + mapper.initializeTypeState(TypeWithAllSimpleTypes.class); + mapper.initializeTypeState(InternalTypeA.class); + mapper.initializeTypeState(TypeWithCollections.class); + mapper.initializeTypeState(VersionedType2.class); + mapper.initializeTypeState(SpecialWrapperTypesTest.class); + flatRecordWriter = new FlatRecordWriter( + mapper.getStateEngine(), new FakeHollowSchemaIdentifierMapper(mapper.getStateEngine())); + } + + @Test + public void testSpecialWrapperTypes() { + 
SpecialWrapperTypesTest wrapperTypesTest = new SpecialWrapperTypesTest(); + wrapperTypesTest.id = 8797182L; + wrapperTypesTest.type = AnEnum.SOME_VALUE_C; + wrapperTypesTest.complexEnum = ComplexEnum.SOME_VALUE_A; + wrapperTypesTest.dateCreated = new Date(); + + flatRecordWriter.reset(); + mapper.writeFlat(wrapperTypesTest, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + SpecialWrapperTypesTest result = mapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + Assert.assertEquals(wrapperTypesTest, result); + Assert.assertEquals(wrapperTypesTest.complexEnum.value, result.complexEnum.value); + Assert.assertEquals(wrapperTypesTest.complexEnum.anotherValue, result.complexEnum.anotherValue); + } + + @Test + public void testSimpleTypes() { + TypeWithAllSimpleTypes typeWithAllSimpleTypes = new TypeWithAllSimpleTypes(); + typeWithAllSimpleTypes.boxedIntegerField = 1; + typeWithAllSimpleTypes.boxedBooleanField = true; + typeWithAllSimpleTypes.boxedDoubleField = 1.0; + typeWithAllSimpleTypes.boxedFloatField = 1.0f; + typeWithAllSimpleTypes.boxedLongField = 1L; + typeWithAllSimpleTypes.boxedShortField = (short) 1; + typeWithAllSimpleTypes.boxedByteField = (byte) 1; + typeWithAllSimpleTypes.boxedCharField = 'a'; + typeWithAllSimpleTypes.primitiveIntegerField = 2; + typeWithAllSimpleTypes.primitiveBooleanField = false; + typeWithAllSimpleTypes.primitiveDoubleField = 2.0; + typeWithAllSimpleTypes.primitiveFloatField = 2.0f; + typeWithAllSimpleTypes.primitiveLongField = 2L; + typeWithAllSimpleTypes.primitiveShortField = (short) 2; + typeWithAllSimpleTypes.primitiveByteField = (byte) 2; + typeWithAllSimpleTypes.primitiveCharField = 'b'; + typeWithAllSimpleTypes.byteArrayField = new byte[]{1, 2, 3}; + typeWithAllSimpleTypes.stringField = "string"; + typeWithAllSimpleTypes.inlinedIntegerField = 3; + typeWithAllSimpleTypes.inlinedBooleanField = true; + typeWithAllSimpleTypes.inlinedDoubleField = 3.0; + typeWithAllSimpleTypes.inlinedFloatField = 3.0f; 
+ typeWithAllSimpleTypes.inlinedLongField = 3L; + typeWithAllSimpleTypes.inlinedShortField = (short) 3; + typeWithAllSimpleTypes.inlinedByteField = (byte) 3; + typeWithAllSimpleTypes.inlinedCharField = 'c'; + typeWithAllSimpleTypes.inlinedStringField = "inlinedstring"; + typeWithAllSimpleTypes.namedIntegerField = 4; + typeWithAllSimpleTypes.namedBooleanField = false; + typeWithAllSimpleTypes.namedDoubleField = 4.0; + typeWithAllSimpleTypes.namedFloatField = 4.0f; + typeWithAllSimpleTypes.namedLongField = 4L; + typeWithAllSimpleTypes.namedShortField = (short) 4; + typeWithAllSimpleTypes.namedByteField = (byte) 4; + typeWithAllSimpleTypes.namedCharField = 'd'; + typeWithAllSimpleTypes.namedByteArrayField = new byte[]{2, 4, 6}; + typeWithAllSimpleTypes.namedStringField = "namedstring"; + typeWithAllSimpleTypes.internalTypeAField = new InternalTypeA(1, "name"); + typeWithAllSimpleTypes.internalTypeCField = new InternalTypeC("data"); + + flatRecordWriter.reset(); + mapper.writeFlat(typeWithAllSimpleTypes, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + TypeWithAllSimpleTypes result = mapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + Assert.assertEquals(typeWithAllSimpleTypes, result); + } + + @Test + public void testCollections() { + TypeWithCollections type = new TypeWithCollections(); + type.id = 1; + type.stringList = Arrays.asList("a", "b", "c"); + type.stringSet = new HashSet<>(type.stringList); + type.integerStringMap = type.stringList.stream().collect( + Collectors.toMap( + s -> type.stringList.indexOf(s), + s -> s + ) + ); + type.internalTypeAList = Arrays.asList(new InternalTypeA(1), new InternalTypeA(2)); + type.internalTypeASet = new HashSet<>(type.internalTypeAList); + type.integerInternalTypeAMap = type.internalTypeAList.stream().collect( + Collectors.toMap( + b -> b.id, + b -> b + ) + ); + type.internalTypeAStringMap = type.internalTypeAList.stream().collect( + Collectors.toMap( + b -> b, + b -> b.name + ) + ); + 
type.multiTypeMap = new HashMap<>(); + type.multiTypeMap.put(new TypeA10(1, "1", 1L), new TypeC10(new byte[]{1, 2, 3})); + type.multiTypeMap.put(new TypeA10(2, "2", 2L), new TypeC10(new byte[]{4, 5, 6})); + type.multiTypeMap.put(new TypeA10(2, "2", 2L), new TypeC10(new byte[]{7, 8, 9})); + + flatRecordWriter.reset(); + mapper.writeFlat(type, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + TypeWithCollections result = mapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + + Assert.assertEquals(type, result); + } + + @Test + public void testMapFromVersionedTypes() { + HollowObjectMapper readerMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + readerMapper.initializeTypeState(TypeWithAllSimpleTypes.class); + readerMapper.initializeTypeState(InternalTypeA.class); + readerMapper.initializeTypeState(TypeWithCollections.class); + readerMapper.initializeTypeState(VersionedType1.class); + + VersionedType2 versionedType2 = new VersionedType2(); + versionedType2.boxedIntegerField = 1; + versionedType2.internalTypeBField = new InternalTypeB(1); + versionedType2.charField = 'a'; + versionedType2.primitiveDoubleField = 1.0; + versionedType2.stringSet = new HashSet<>(Arrays.asList("a", "b", "c")); + + flatRecordWriter.reset(); + mapper.writeFlat(versionedType2, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + VersionedType1 result = readerMapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + Assert.assertEquals(null, result.stringField); // stringField is not present in VersionedType1 + Assert.assertEquals(versionedType2.boxedIntegerField, result.boxedIntegerField); + Assert.assertEquals(versionedType2.primitiveDoubleField, result.primitiveDoubleField, 0); + Assert.assertEquals(null, result.internalTypeAField); // internalTypeAField is not present in VersionedType1 + Assert.assertEquals(versionedType2.stringSet, result.stringSet); + } + + @Test + public void 
shouldMapNonPrimitiveWrapperToPrimitiveWrapperIfCommonFieldIsTheSame() { + HollowObjectMapper writerMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + writerMapper.initializeTypeState(TypeStateA1.class); + + FlatRecordWriter flatRecordWriter = new FlatRecordWriter( + writerMapper.getStateEngine(), new FakeHollowSchemaIdentifierMapper(writerMapper.getStateEngine())); + + TypeStateA1 typeStateA1 = new TypeStateA1(); + typeStateA1.id = 1; + typeStateA1.subValue = new SubValue(); + typeStateA1.subValue.value = "value"; + + flatRecordWriter.reset(); + writerMapper.writeFlat(typeStateA1, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + HollowObjectMapper readerMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + readerMapper.initializeTypeState(TypeStateA2.class); + TypeStateA2 result = readerMapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + Assert.assertEquals("value", result.subValue); + } + + @Test + public void shouldMapPrimitiveWrapperToNonPrimitiveWrapperIfCommonFieldIsTheSame() { + HollowObjectMapper writerMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + writerMapper.initializeTypeState(TypeStateA2.class); + + FlatRecordWriter flatRecordWriter = new FlatRecordWriter( + writerMapper.getStateEngine(), new FakeHollowSchemaIdentifierMapper(writerMapper.getStateEngine())); + + TypeStateA2 typeStateA2 = new TypeStateA2(); + typeStateA2.id = 1; + typeStateA2.subValue = "value"; + + writerMapper.writeFlat(typeStateA2, flatRecordWriter); + FlatRecord fr = flatRecordWriter.generateFlatRecord(); + + + HollowObjectMapper readerMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + readerMapper.initializeTypeState(TypeStateA1.class); + TypeStateA1 result = readerMapper.readFlat(new FlatRecordTraversalObjectNode(fr)); + + Assert.assertEquals("value", result.subValue.value); + } + + @HollowPrimaryKey(fields={"boxedIntegerField", "stringField"}) + private static class 
TypeWithAllSimpleTypes { + Integer boxedIntegerField; + Boolean boxedBooleanField; + Double boxedDoubleField; + Float boxedFloatField; + Long boxedLongField; + Short boxedShortField; + Byte boxedByteField; + Character boxedCharField; + + int primitiveIntegerField; + boolean primitiveBooleanField; + double primitiveDoubleField; + float primitiveFloatField; + long primitiveLongField; + short primitiveShortField; + byte primitiveByteField; + char primitiveCharField; + byte[] byteArrayField; + String stringField; + + @HollowInline + Integer inlinedIntegerField; + @HollowInline + Boolean inlinedBooleanField; + @HollowInline + Double inlinedDoubleField; + @HollowInline + Float inlinedFloatField; + @HollowInline + Long inlinedLongField; + @HollowInline + Short inlinedShortField; + @HollowInline + Byte inlinedByteField; + @HollowInline + Character inlinedCharField; + @HollowInline + String inlinedStringField; + + @HollowTypeName(name = "NamedIntegerField") + Integer namedIntegerField; + @HollowTypeName(name = "NamedBooleanField") + Boolean namedBooleanField; + @HollowTypeName(name = "NamedDoubleField") + Double namedDoubleField; + @HollowTypeName(name = "NamedFloatField") + Float namedFloatField; + @HollowTypeName(name = "NamedLongField") + Long namedLongField; + @HollowTypeName(name = "NamedShortField") + Short namedShortField; + @HollowTypeName(name = "NamedByteField") + Byte namedByteField; + @HollowTypeName(name = "NamedCharField") + Character namedCharField; + @HollowTypeName(name = "NamedByteArrayField") + byte[] namedByteArrayField; + @HollowTypeName(name = "NamedStringField") + String namedStringField; + + InternalTypeA internalTypeAField; + InternalTypeC internalTypeCField; + + @Override + public boolean equals(Object o) { + if(o instanceof TypeWithAllSimpleTypes) { + TypeWithAllSimpleTypes other = (TypeWithAllSimpleTypes)o; + return Objects.equals(boxedIntegerField, other.boxedIntegerField) && + Objects.equals(boxedBooleanField, other.boxedBooleanField) && + 
Objects.equals(boxedDoubleField, other.boxedDoubleField) && + Objects.equals(boxedFloatField, other.boxedFloatField) && + Objects.equals(boxedLongField, other.boxedLongField) && + Objects.equals(boxedShortField, other.boxedShortField) && + Objects.equals(boxedByteField, other.boxedByteField) && + Objects.equals(boxedCharField, other.boxedCharField) && + primitiveIntegerField == other.primitiveIntegerField && + primitiveBooleanField == other.primitiveBooleanField && + primitiveDoubleField == other.primitiveDoubleField && + primitiveFloatField == other.primitiveFloatField && + primitiveLongField == other.primitiveLongField && + primitiveShortField == other.primitiveShortField && + primitiveByteField == other.primitiveByteField && + primitiveCharField == other.primitiveCharField && + Arrays.equals(byteArrayField, other.byteArrayField) && + Objects.equals(stringField, other.stringField) && + Objects.equals(inlinedIntegerField, other.inlinedIntegerField) && + Objects.equals(inlinedBooleanField, other.inlinedBooleanField) && + Objects.equals(inlinedDoubleField, other.inlinedDoubleField) && + Objects.equals(inlinedFloatField, other.inlinedFloatField) && + Objects.equals(inlinedLongField, other.inlinedLongField) && + Objects.equals(inlinedShortField, other.inlinedShortField) && + Objects.equals(inlinedByteField, other.inlinedByteField) && + Objects.equals(inlinedCharField, other.inlinedCharField) && + Objects.equals(inlinedStringField, other.inlinedStringField) && + Objects.equals(namedIntegerField, other.namedIntegerField) && + Objects.equals(namedBooleanField, other.namedBooleanField) && + Objects.equals(namedDoubleField, other.namedDoubleField) && + Objects.equals(namedFloatField, other.namedFloatField) && + Objects.equals(namedLongField, other.namedLongField) && + Objects.equals(namedShortField, other.namedShortField) && + Objects.equals(namedByteField, other.namedByteField) && + Objects.equals(namedCharField, other.namedCharField) && + 
Arrays.equals(namedByteArrayField, other.namedByteArrayField) && + Objects.equals(namedStringField, other.namedStringField) && + Objects.equals(internalTypeAField, other.internalTypeAField) && + Objects.equals(internalTypeCField, other.internalTypeCField); + } + return false; + } + + @Override + public String toString() { + return "TypeA{" + + "boxedIntegerField=" + boxedIntegerField + + ", boxedBooleanField=" + boxedBooleanField + + ", boxedDoubleField=" + boxedDoubleField + + ", boxedFloatField=" + boxedFloatField + + ", boxedLongField=" + boxedLongField + + ", boxedShortField=" + boxedShortField + + ", boxedByteField=" + boxedByteField + + ", boxedCharField=" + boxedCharField + + ", primitiveIntegerField=" + primitiveIntegerField + + ", primitiveBooleanField=" + primitiveBooleanField + + ", primitiveDoubleField=" + primitiveDoubleField + + ", primitiveFloatField=" + primitiveFloatField + + ", primitiveLongField=" + primitiveLongField + + ", primitiveShortField=" + primitiveShortField + + ", primitiveByteField=" + primitiveByteField + + ", primitiveCharField=" + primitiveCharField + + ", byteArrayField=" + Arrays.toString(byteArrayField) + + ", stringField='" + stringField + '\'' + + ", inlinedIntegerField=" + inlinedIntegerField + + ", inlinedBooleanField=" + inlinedBooleanField + + ", inlinedDoubleField=" + inlinedDoubleField + + ", inlinedFloatField=" + inlinedFloatField + + ", inlinedLongField=" + inlinedLongField + + ", inlinedShortField=" + inlinedShortField + + ", inlinedByteField=" + inlinedByteField + + ", inlinedCharField=" + inlinedCharField + + ", inlinedStringField=" + inlinedStringField + + ", namedIntegerField=" + namedIntegerField + + ", namedBooleanField=" + namedBooleanField + + ", namedDoubleField=" + namedDoubleField + + ", namedFloatField=" + namedFloatField + + ", namedLongField=" + namedLongField + + ", namedShortField=" + namedShortField + + ", namedByteField=" + namedByteField + + ", namedCharField=" + namedCharField + + ", 
namedByteArrayField=" + Arrays.toString(namedByteArrayField) + + ", namedStringField='" + namedStringField + '\'' + + ", internalTypeAField=" + internalTypeAField + + ", internalTypeCField=" + internalTypeCField + + '}'; + } + } + + @HollowPrimaryKey(fields={"id"}) + private static class TypeWithCollections { + int id; + List stringList; + Set stringSet; + Map integerStringMap; + List internalTypeAList; + Set internalTypeASet; + Map integerInternalTypeAMap; + Map internalTypeAStringMap; + @HollowHashKey(fields="a2") + public Map multiTypeMap; + + @Override + public boolean equals(Object o) { + if(o instanceof TypeWithCollections) { + TypeWithCollections other = (TypeWithCollections)o; + return id == other.id && + Objects.equals(stringList, other.stringList) && + Objects.equals(stringSet, other.stringSet) && + Objects.equals(integerStringMap, other.integerStringMap) && + Objects.equals(internalTypeAList, other.internalTypeAList) && + Objects.equals(internalTypeASet, other.internalTypeASet) && + Objects.equals(integerInternalTypeAMap, other.integerInternalTypeAMap) && + Objects.equals(internalTypeAStringMap, other.internalTypeAStringMap) && + Objects.equals(multiTypeMap, other.multiTypeMap); + } + return false; + } + + @Override + public String toString() { + return "TypeWithCollections{" + + "id=" + id + + ", stringList=" + stringList + + ", stringSet=" + stringSet + + ", integerStringMap=" + integerStringMap + + ", internalTypeAList=" + internalTypeAList + + ", internalTypeASet=" + internalTypeASet + + ", integerInternalTypeAMap=" + integerInternalTypeAMap + + ", internalTypeAStringMap=" + internalTypeAStringMap + + ", multiTypeMap=" + multiTypeMap + + '}'; + } + } + + @HollowTypeName(name = "VersionedType") + private static class VersionedType1 { + String stringField; + Integer boxedIntegerField; + double primitiveDoubleField; + InternalTypeA internalTypeAField; + Set stringSet; + + @Override + public boolean equals(Object o) { + if(o instanceof VersionedType1) { 
+ VersionedType1 other = (VersionedType1)o; + return Objects.equals(stringField, other.stringField) && + Objects.equals(boxedIntegerField, other.boxedIntegerField) && + primitiveDoubleField == other.primitiveDoubleField && + Objects.equals(internalTypeAField, other.internalTypeAField) && + Objects.equals(stringSet, other.stringSet); + } + return false; + } + + @Override + public String toString() { + return "VersionedType1{" + + "stringField='" + stringField + '\'' + + ", boxedIntegerField=" + boxedIntegerField + + ", primitiveDoubleField=" + primitiveDoubleField + + ", internalTypeAField=" + internalTypeAField + + ", stringSet=" + stringSet + + '}'; + } + } + + @HollowTypeName(name = "VersionedType") + private static class VersionedType2 { + // No longer has the stringField + Integer boxedIntegerField; + double primitiveDoubleField; + // No longer has the typeBField + Set stringSet; + char charField; // Added a char field + InternalTypeB internalTypeBField; // Added a new type field + + @Override + public boolean equals(Object o) { + if(o instanceof VersionedType2) { + VersionedType2 other = (VersionedType2)o; + return Objects.equals(boxedIntegerField, other.boxedIntegerField) && + primitiveDoubleField == other.primitiveDoubleField && + Objects.equals(stringSet, other.stringSet) && + charField == other.charField && + Objects.equals(internalTypeBField, other.internalTypeBField); + } + return false; + } + + @Override + public String toString() { + return "VersionedType2{" + + "boxedIntegerField=" + boxedIntegerField + + ", primitiveDoubleField=" + primitiveDoubleField + + ", stringSet=" + stringSet + + ", charField=" + charField + + ", internalTypeBField=" + internalTypeBField + + '}'; + } + } + + private static class InternalTypeA { + Integer id; + String name; + + public InternalTypeA(Integer id) { + this(id, String.valueOf(id)); + } + + public InternalTypeA(Integer id, String name) { + this.id = id; + this.name = name; + } + + @Override + public int hashCode() { 
+ return Objects.hash(id, name); + } + + @Override + public boolean equals(Object o) { + if(o instanceof InternalTypeA) { + InternalTypeA other = (InternalTypeA)o; + return id.equals(other.id) && name.equals(other.name); + } + return false; + } + + @Override + public String toString() { + return "InternalTypeA{" + + "id=" + id + + ", name='" + name + '\'' + + '}'; + } + } + + private static class InternalTypeB { + Integer id; + String name; + + public InternalTypeB(Integer id) { + this(id, String.valueOf(id)); + } + + public InternalTypeB(Integer id, String name) { + this.id = id; + this.name = name; + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + @Override + public boolean equals(Object o) { + if(o instanceof InternalTypeB) { + InternalTypeB other = (InternalTypeB)o; + return id.equals(other.id) && name.equals(other.name); + } + return false; + } + + @Override + public String toString() { + return "InternalTypeB{" + + "id=" + id + + ", name='" + name + '\'' + + '}'; + } + } + + public static class InternalTypeC { + @HollowInline + String data; + + public InternalTypeC(String data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if(o instanceof InternalTypeC) { + InternalTypeC other = (InternalTypeC)o; + return data.equals(other.data); + } + return false; + } + + @Override + public String toString() { + return "InternalTypeC{" + + "data=" + data + + '}'; + } + } + + @HollowTypeName(name="TypeStateA") + @HollowPrimaryKey(fields="id") + public static class TypeStateA1 { + public int id; + public SubValue subValue; + } + + @HollowTypeName(name="TypeStateA") + @HollowPrimaryKey(fields="id") + public static class TypeStateA2 { + public int id; + @HollowTypeName(name="SubValue") + public String subValue; + } + + public static class SubValue { + @HollowInline + public String value; + @HollowInline + public String anotherValue; + } + + enum AnEnum { + SOME_VALUE_A, + SOME_VALUE_B, + SOME_VALUE_C, + } + + 
enum ComplexEnum { + SOME_VALUE_A("A", 1), + SOME_VALUE_B("B", 2), + SOME_VALUE_C("C", 3); + + final String value; + final int anotherValue; + + ComplexEnum(String value, int anotherValue) { + this.value = value; + this.anotherValue = anotherValue; + } + } + + @HollowTypeName(name = "SpecialWrapperTypesTest") + @HollowPrimaryKey(fields = {"id"}) + static class SpecialWrapperTypesTest { + long id; + @HollowTypeName(name = "AnEnum") + AnEnum type; + @HollowTypeName(name = "ComplexEnum") + ComplexEnum complexEnum; + Date dateCreated; + + @Override + public boolean equals(Object o) { + if (o instanceof SpecialWrapperTypesTest) { + SpecialWrapperTypesTest other = (SpecialWrapperTypesTest) o; + return Objects.equals(id, other.id) && + Objects.equals(type, other.type) && + Objects.equals(complexEnum, other.complexEnum) && + Objects.equals(dateCreated, other.dateCreated); + } + return false; + } + + @Override + public String toString() { + return "SpecialWrapperTypesTest{" + + "id=" + id + + ", type='" + type + '\'' + + ", complexEnum='" + complexEnum + '\'' + + ", dateCreated=" + dateCreated + + '}'; + } + } + + public static class TypeA10 { + public int a1; + @HollowInline + public String a2; + public long a3; + + public TypeA10(int a1, String a2, long a3) { + this.a1 = a1; + this.a2 = a2; + this.a3 = a3; + } + + @Override + public String toString() { + return "{" + a1 + "," + a2 + "," + a3 + "}"; + } + + @Override + public int hashCode() { + return Objects.hash(a1, a2, a3); + } + + @Override + public boolean equals(Object o) { + if(o instanceof TypeA10) { + TypeA10 other = (TypeA10)o; + return a1 == other.a1 && a2.equals(other.a2) && a3 == other.a3; + } + return false; + } + } + + public static class TypeC10 { + public byte[] c1; + + public TypeC10(byte[] c1) { + this.c1 = c1; + } + + @Override + public String toString() { + return Base64.getEncoder().encodeToString(c1); + } + + @Override + public boolean equals(Object o) { + if(o instanceof TypeC10) { + TypeC10 other = 
(TypeC10)o; + return Arrays.equals(c1, other.c1); + } + return false; + } + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/FakeHollowIdentifierMapper.java b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/FakeHollowIdentifierMapper.java new file mode 100644 index 0000000000..de3e95f0a3 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/FakeHollowIdentifierMapper.java @@ -0,0 +1,48 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords; + +import com.netflix.hollow.core.index.key.PrimaryKey; +import com.netflix.hollow.core.schema.HollowObjectSchema; +import com.netflix.hollow.core.schema.HollowSchema; +import com.netflix.hollow.core.schema.SimpleHollowDataset; +import com.netflix.hollow.core.write.objectmapper.flatrecords.HollowSchemaIdentifierMapper; + +import java.util.ArrayList; +import java.util.List; + +public class FakeHollowIdentifierMapper implements HollowSchemaIdentifierMapper { + private final List allSchemas = new ArrayList<>(); + + @Override + public HollowSchema getSchema(int identifier) { + return allSchemas.get(identifier); + } + + @Override + public HollowObjectSchema.FieldType[] getPrimaryKeyFieldTypes(int identifier) { + HollowSchema schema = getSchema(identifier); + if (schema.getSchemaType() == HollowSchema.SchemaType.OBJECT) { + PrimaryKey primaryKey = ((HollowObjectSchema) schema).getPrimaryKey(); + + if (primaryKey != null) { + HollowObjectSchema.FieldType[] fieldTypes = new HollowObjectSchema.FieldType[primaryKey.numFields()]; + + for (int i = 0; i < fieldTypes.length; i++) { + fieldTypes[i] = primaryKey.getFieldType(new SimpleHollowDataset(allSchemas), i); + } + + return fieldTypes; + } + } + + return null; + } + + @Override + public int getSchemaId(HollowSchema schema) { + for(int i=0;i ((FlatRecordTraversalObjectNode) n).getFieldValue("value")) + .containsExactlyInAnyOrder("US", "CA"); + + // tags + 
FlatRecordTraversalMapNode tagsNode = (FlatRecordTraversalMapNode) node.getFieldNode("tags"); + assertThat(tagsNode).hasSize(2); + assertThat(tagsNode.entrySet()) + .extracting( + entry -> ((FlatRecordTraversalObjectNode) entry.getKey()).getFieldValue("value"), + entry -> ((FlatRecordTraversalObjectNode) entry.getValue()).getFieldValue("value")) + .containsExactlyInAnyOrder( + tuple("Type", "Movie"), + tuple("Genre", "action")); + + // cast + FlatRecordTraversalSetNode castNode = (FlatRecordTraversalSetNode) node.getFieldNode("cast"); + assertThat(castNode) + .extracting( + n -> ((FlatRecordTraversalObjectNode) n).getFieldValue("id"), + n -> ((FlatRecordTraversalObjectNode) n).getFieldValue("name"), + n -> { + FlatRecordTraversalObjectNode elementNode = (FlatRecordTraversalObjectNode) n; + FlatRecordTraversalObjectNode roleNode = (FlatRecordTraversalObjectNode) elementNode.getFieldNode("role"); + return roleNode.getFieldValue("_name"); + }) + .containsExactlyInAnyOrder( + tuple(1, "Benedict Cumberbatch", "ACTOR"), + tuple(2, "Martin Freeman", "ACTOR"), + tuple(2, "Quentin Tarantino", "DIRECTOR") + ); + + // awardsReceived + FlatRecordTraversalListNode awardsReceivedNode = (FlatRecordTraversalListNode) node.getFieldNode("awardsReceived"); + assertThat(awardsReceivedNode) + .extracting( + n -> { + FlatRecordTraversalObjectNode awardNode = (FlatRecordTraversalObjectNode) n; + FlatRecordTraversalObjectNode awardNameNode = (FlatRecordTraversalObjectNode) awardNode.getFieldNode("name"); + return awardNameNode.getFieldValue("value"); + }, + n -> ((FlatRecordTraversalObjectNode) n).getFieldValue("year")) + .containsExactlyInAnyOrder( + tuple("Oscar", 2020), + tuple("Golden Globe", 2025) + ); + } + + @Test + public void testWalkFlatRecordUsingSchemaHints() { + FlatRecord flatRecord = createTestFlatRecord(); + FlatRecordTraversalObjectNode node = new FlatRecordTraversalObjectNode(flatRecord); + + // primitives + assertThat(node.getFieldValueInt("id")).isEqualTo(1); + 
assertThat(node.getFieldValueIntBoxed("id")).isEqualTo(1); + assertThat(node.getFieldValueInt("releaseYear")).isEqualTo(2020); + assertThat(node.getFieldValueIntBoxed("releaseYear")).isEqualTo(2020); + + // title + FlatRecordTraversalObjectNode titleNode = node.getObjectFieldNode("title"); + assertThat(titleNode.getFieldValueString("value")).isEqualTo("Movie1"); + + // primaryGenre + FlatRecordTraversalObjectNode primaryGenreNode = node.getObjectFieldNode("primaryGenre"); + assertThat(primaryGenreNode.getFieldValueString("value")).isEqualTo("action"); + + // maturityRating + FlatRecordTraversalObjectNode maturityRatingNode = node.getObjectFieldNode("maturityRating"); + FlatRecordTraversalObjectNode maturityRatingNodeRating = maturityRatingNode.getObjectFieldNode("rating"); + assertThat(maturityRatingNodeRating.getFieldValueString("value")).isEqualTo("PG"); + FlatRecordTraversalObjectNode maturityRatingNodeAdvisory = maturityRatingNode.getObjectFieldNode("advisory"); + assertThat(maturityRatingNodeAdvisory.getFieldValueString("value")).isEqualTo("Some advisory"); + + // countries + FlatRecordTraversalSetNode countriesNode = node.getSetFieldNode("countries"); + Iterable countryNodes = countriesNode::objects; + assertThat(countryNodes) + .extracting(n -> n.getFieldValueString("value")) + .containsExactlyInAnyOrder("US", "CA"); + + // tags + FlatRecordTraversalMapNode tagsNode = node.getMapFieldNode("tags"); + assertThat(tagsNode).hasSize(2); + Iterable> tagNodes = tagsNode::entrySetIterator; + assertThat(tagNodes) + .extracting( + entry -> entry.getKey().getFieldValue("value"), + entry -> entry.getValue().getFieldValue("value")) + .containsExactlyInAnyOrder( + tuple("Type", "Movie"), + tuple("Genre", "action")); + + // cast + FlatRecordTraversalSetNode castNode = node.getSetFieldNode("cast"); + Iterable castNodes = castNode::objects; + assertThat(castNodes) + .extracting( + n -> n.getFieldValueInt("id"), + n -> n.getFieldValueString("name"), + n -> 
n.getObjectFieldNode("role").getFieldValueString("_name")) + .containsExactlyInAnyOrder( + tuple(1, "Benedict Cumberbatch", "ACTOR"), + tuple(2, "Martin Freeman", "ACTOR"), + tuple(2, "Quentin Tarantino", "DIRECTOR") + ); + + // awardsReceived + FlatRecordTraversalListNode awardsReceivedNode = node.getListFieldNode("awardsReceived"); + assertThat(awardsReceivedNode.getObject(0)) + .extracting( + n -> n.getObjectFieldNode("name").getFieldValueString("value"), + n -> n.getFieldValueInt("year")) + .containsExactly("Oscar", 2020); + assertThat(awardsReceivedNode.getObject(1)) + .extracting( + n -> n.getObjectFieldNode("name").getFieldValueString("value"), + n -> n.getFieldValueInt("year")) + .containsExactly("Golden Globe", 2025); + } + + @Test + public void testWalkFlatRecordThroughTheNodesWithNulls() { + FlatRecord flatRecord = createTestFlatRecord(); + FlatRecordTraversalObjectNode node = new FlatRecordTraversalObjectNode(flatRecord); + + // nulls + assertThat(node.getFieldValue("nonExistentField")).isNull(); + assertThat(node.getFieldNode("nonExistentField")).isNull(); + } + + private FlatRecord createTestFlatRecord() { + Movie movie1 = new Movie(); + movie1.id = 1; + movie1.title = "Movie1"; + movie1.releaseYear = 2020; + movie1.primaryGenre = "action"; + movie1.maturityRating = new MaturityRating("PG", "Some advisory"); + movie1.countries = new HashSet<>(); + movie1.countries.add(new Country("US")); + movie1.countries.add(new Country("CA")); + movie1.tags = new HashMap<>(); + movie1.tags.put(new Tag("Type"), new TagValue("Movie")); + movie1.tags.put(new Tag("Genre"), new TagValue("action")); + movie1.cast = new HashSet<>(); + movie1.cast.add(new CastMember(1, "Benedict Cumberbatch", CastRole.ACTOR)); + movie1.cast.add(new CastMember(2, "Martin Freeman", CastRole.ACTOR)); + movie1.cast.add(new CastMember(2, "Quentin Tarantino", CastRole.DIRECTOR)); + movie1.awardsReceived = new ArrayList<>(); + movie1.awardsReceived.add(new Award("Oscar", 2020)); + 
movie1.awardsReceived.add(new Award("Golden Globe", 2025)); + + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie1, flatRecordWriter); + return flatRecordWriter.generateFlatRecord(); + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNodeEqualityTest.java b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNodeEqualityTest.java new file mode 100644 index 0000000000..b88f59a6c0 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/core/write/objectmapper/flatrecords/traversal/FlatRecordTraversalObjectNodeEqualityTest.java @@ -0,0 +1,621 @@ +package com.netflix.hollow.core.write.objectmapper.flatrecords.traversal; + + +import com.netflix.hollow.core.schema.SimpleHollowDataset; +import com.netflix.hollow.core.util.HollowWriteStateCreator; +import com.netflix.hollow.core.write.HollowWriteStateEngine; +import com.netflix.hollow.core.write.objectmapper.HollowInline; +import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper; +import com.netflix.hollow.core.write.objectmapper.HollowPrimaryKey; +import com.netflix.hollow.core.write.objectmapper.HollowTypeName; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FakeHollowIdentifierMapper; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FakeHollowSchemaIdentifierMapper; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord; +import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter; +import 
com.netflix.hollow.core.write.objectmapper.flatrecords.HollowSchemaIdentifierMapper; +import com.netflix.hollow.test.dto.Award; +import com.netflix.hollow.test.dto.CastMember; +import com.netflix.hollow.test.dto.CastRole; +import com.netflix.hollow.test.dto.Country; +import com.netflix.hollow.test.dto.MaturityRating; +import com.netflix.hollow.test.dto.Movie; +import com.netflix.hollow.test.dto.Tag; +import com.netflix.hollow.test.dto.TagValue; +import org.assertj.core.api.Assertions; +import org.junit.Before; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import static org.assertj.core.api.Assertions.assertThat; + +public class FlatRecordTraversalObjectNodeEqualityTest { + // Mapper and writer for types of version 1 + private HollowObjectMapper mapper1; + private FlatRecordWriter writer1; + + // Mapper and writer for types of version 2 + private HollowObjectMapper mapper2; + private FlatRecordWriter writer2; + @Before + public void beforeEach() { + HollowSchemaIdentifierMapper idMapper = new FakeHollowIdentifierMapper(); + + mapper1 = new HollowObjectMapper(new HollowWriteStateEngine()); + mapper1.initializeTypeState(TypeState1.class); + mapper1.initializeTypeState(IntSetTypeState1.class); + mapper1.initializeTypeState(IntTypeState1.class); + mapper1.initializeTypeState(RecordWithSubObject1.class); + writer1 = new FlatRecordWriter(mapper1.getStateEngine(), idMapper); + + mapper2 = new HollowObjectMapper(new HollowWriteStateEngine()); + mapper2.initializeTypeState(TypeState2.class); + mapper2.initializeTypeState(IntSetTypeState2.class); + mapper2.initializeTypeState(IntTypeState2.class); + mapper2.initializeTypeState(RecordWithSubObject2.class); + writer2 = new FlatRecordWriter(mapper2.getStateEngine(), idMapper); + } + @Test + public void shouldEqualOnTheSameFlatRecord() { + 
FlatRecord flatRecord1 = createTestFlatRecord1(); + FlatRecord flatRecord2 = createTestFlatRecord1(); + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(flatRecord1), new FlatRecordTraversalObjectNode(flatRecord2))).isTrue(); + } + + @Test + public void shouldFailOnTheDifferentFlatRecord() { + FlatRecord flatRecord1 = createTestFlatRecord1(); + FlatRecord flatRecord2 = createTestFlatRecord2(); + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(flatRecord1), new FlatRecordTraversalObjectNode(flatRecord2))).isFalse(); + } + + @Test + public void differentMap() { + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + Movie movie1 = new Movie(); + movie1.tags = new HashMap<>(); + movie1.tags.put(new Tag("Type"), new TagValue("Movie")); + movie1.tags.put(new Tag("Genre"), new TagValue("action")); + + Movie movie2 = new Movie(); + movie2.tags = new HashMap<>(); + movie2.tags.put(new Tag("Type"), new TagValue("Movie")); + movie2.tags.put(new Tag("Genre"), new TagValue("comedy")); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie1, flatRecordWriter); + FlatRecord flatRecord1 = flatRecordWriter.generateFlatRecord(); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie2, flatRecordWriter); + FlatRecord flatRecord2 = flatRecordWriter.generateFlatRecord(); + + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(flatRecord1), new FlatRecordTraversalObjectNode(flatRecord2))).isFalse(); + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new 
FlatRecordTraversalObjectNode(flatRecord2), new FlatRecordTraversalObjectNode(flatRecord1))).isFalse(); + } + + @Test + public void differentSet() { + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + Movie movie1 = new Movie(); + movie1.countries = new HashSet<>(); + movie1.countries.add(new Country("US")); + movie1.countries.add(new Country("CA")); + + Movie movie2 = new Movie(); + movie2.countries = new HashSet<>(); + movie2.countries.add(new Country("US")); + movie2.countries.add(new Country("CB")); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie1, flatRecordWriter); + FlatRecord flatRecord1 = flatRecordWriter.generateFlatRecord(); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie2, flatRecordWriter); + FlatRecord flatRecord2 = flatRecordWriter.generateFlatRecord(); + + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(flatRecord1), new FlatRecordTraversalObjectNode(flatRecord2))).isFalse(); + } + + @Test + public void differentList() { + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + Movie movie1 = new Movie(); + movie1.awardsReceived = new ArrayList<>(); + movie1.awardsReceived.add(new Award("Oscar", 2020)); + movie1.awardsReceived.add(new Award("Golden Globe", 2025)); + + Movie movie2 = new Movie(); + movie2.awardsReceived = new 
ArrayList<>(); + movie2.awardsReceived.add(new Award("Oscar", 2020)); + movie2.awardsReceived.add(new Award("Golden Globe", 2026)); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie1, flatRecordWriter); + FlatRecord flatRecord1 = flatRecordWriter.generateFlatRecord(); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie2, flatRecordWriter); + FlatRecord flatRecord2 = flatRecordWriter.generateFlatRecord(); + + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(flatRecord1), new FlatRecordTraversalObjectNode(flatRecord2))).isFalse(); + } + + + private FlatRecord createTestFlatRecord1() { + Movie movie1 = new Movie(); + movie1.id = 1; + movie1.title = "Movie1"; + movie1.releaseYear = 2020; + movie1.primaryGenre = "action"; + movie1.maturityRating = new MaturityRating("PG", "Some advisory"); + movie1.countries = new HashSet<>(); + movie1.countries.add(new Country("US")); + movie1.countries.add(new Country("CA")); + movie1.tags = new HashMap<>(); + movie1.tags.put(new Tag("Type"), new TagValue("Movie")); + movie1.tags.put(new Tag("Genre"), new TagValue("action")); + movie1.cast = new HashSet<>(); + movie1.cast.add(new CastMember(1, "Benedict Cumberbatch", CastRole.ACTOR)); + movie1.cast.add(new CastMember(2, "Martin Freeman", CastRole.ACTOR)); + movie1.cast.add(new CastMember(2, "Quentin Tarantino", CastRole.DIRECTOR)); + movie1.awardsReceived = new ArrayList<>(); + movie1.awardsReceived.add(new Award("Oscar", 2020)); + movie1.awardsReceived.add(new Award("Golden Globe", 2025)); + + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + flatRecordWriter.reset(); + 
objMapper.writeFlat(movie1, flatRecordWriter); + return flatRecordWriter.generateFlatRecord(); + } + + @Test + public void shouldFindRecordsEqualOnDifferentDataModelsWithDifferentValues() { + RecordWithSubObject1 left = new RecordWithSubObject1(); + left.id = "ID"; + left.intField = 1; + left.subObject = new RecordSubObject(); + left.subObject.stringField = "A"; + left.subObject.intField = 1; + writer1.reset(); + mapper1.writeFlat(left, writer1); + FlatRecord leftRec = writer1.generateFlatRecord(); + + // RecordWithSubObject2 does not have a subObject field + RecordWithSubObject2 right = new RecordWithSubObject2(); + right.id = "ID"; + right.intField = 1; + writer2.reset(); + mapper2.writeFlat(right, writer2); + FlatRecord rightRec = writer2.generateFlatRecord(); + // With fuzzy matching, the records are equal if the intersection of the schemas have the same fields. + // In this case, `RecordWithSubObject2` does not know about `subObject` so it's not considered in the + // equality check. 
+ Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(leftRec), new FlatRecordTraversalObjectNode(rightRec))).isTrue(); + Assertions.assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(rightRec), new FlatRecordTraversalObjectNode(leftRec))).isTrue(); + } + + @Test + public void shouldFindRecordsUnequalOnTheSameDataModelWithAnObjectFieldNotSetOnOne() { + HollowSchemaIdentifierMapper schemaMapper = new FakeHollowIdentifierMapper(); + + HollowObjectMapper objectMapper = new HollowObjectMapper(new HollowWriteStateEngine()); + objectMapper.initializeTypeState(RecordWithSubObject1.class); + objectMapper.initializeTypeState(RecordSubObject.class); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(objectMapper.getStateEngine(), schemaMapper); + + RecordWithSubObject1 left = new RecordWithSubObject1(); + left.id = "ID"; + left.intField = 1; + left.subObject = new RecordSubObject(); + left.subObject.stringField = "A"; + left.subObject.intField = 1; + + flatRecordWriter.reset(); + objectMapper.writeFlat(left, flatRecordWriter); + FlatRecord leftRec = flatRecordWriter.generateFlatRecord(); + + RecordWithSubObject1 right = new RecordWithSubObject1(); + right.id = "ID"; + right.intField = 1; + flatRecordWriter.reset(); + objectMapper.writeFlat(right, flatRecordWriter); + FlatRecord rightRec = flatRecordWriter.generateFlatRecord(); + + // Even with fuzzy matching, these records are not equal because "right" does not set `subObject`, + // even though it's defined in both schemas. + assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(leftRec), new FlatRecordTraversalObjectNode(rightRec))).isFalse(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(rightRec), new FlatRecordTraversalObjectNode(leftRec))).isFalse(); + } + + @Test + public void shouldFindTwoPrimitiveSetsToBeEqualRegardlessOfOrder() { + IntSetTypeState1 intTypeState1 = 
new IntSetTypeState1(); + intTypeState1.id = "ID"; + intTypeState1.intSet = new HashSet<>(Arrays.asList(15, 5)); + + writer1.reset(); + mapper1.writeFlat(intTypeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + IntSetTypeState2 intTypeState2 = new IntSetTypeState2(); + intTypeState2.id = "ID"; + intTypeState2.intSet = new HashSet<>(Arrays.asList(5, 15)); + writer2.reset(); + mapper2.writeFlat(intTypeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + // The order of the elements in the Set should not matter + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isTrue(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isTrue(); + } + + @Test + public void shouldFindTwoPrimitiveSetsToBeDifferentIfContentIsDifferent() { + IntSetTypeState1 intTypeState1 = new IntSetTypeState1(); + intTypeState1.id = "ID"; + intTypeState1.intSet = new HashSet<>(Arrays.asList(15, 5)); + + writer1.reset(); + mapper1.writeFlat(intTypeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + IntSetTypeState2 intTypeState2 = new IntSetTypeState2(); + intTypeState2.id = "ID"; + intTypeState2.intSet = new HashSet<>(Arrays.asList(5, 20)); + writer2.reset(); + mapper2.writeFlat(intTypeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + // The order of the elements in the Set should not matter + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isFalse(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isFalse(); + } + + @Test + public void 
shouldProvideCollisionGuaranteesForIntegerCollisions_onObjects() { + IntTypeState1 intTypeState1 = new IntTypeState1(); + intTypeState1.intA = 15; + intTypeState1.intB = 5; + + writer1.reset(); + mapper1.writeFlat(intTypeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + IntTypeState2 intTypeState2 = new IntTypeState2(); + intTypeState2.intA = 13; + intTypeState2.intB = 7; + writer2.reset(); + mapper2.writeFlat(intTypeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isFalse(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isFalse(); + } + + @Test + public void shouldUseExactFlagToConsiderExtraFieldsInEquality_usingReferences() { + TypeState1 typeState1 = new TypeState1(); + typeState1.longField = 1L; + typeState1.stringField = "A"; + typeState1.doubleField = 1.0; + typeState1.basicIntField = 1; + typeState1.basicIntFieldOnlyInTypeState1 = 1; // Set only in TypeState1; fuzzy matching ignores it, so the records below still compare equal (presumably an "exact" comparison would treat them as unequal — TODO confirm). 
+ + writer1.reset(); + mapper1.writeFlat(typeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + TypeState2 typeState2 = new TypeState2(); + typeState2.longField = 1L; + typeState2.stringField = "A"; + typeState2.doubleField = 1.0; + typeState2.basicIntField = 1; + writer2.reset(); + mapper2.writeFlat(typeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isTrue(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isTrue(); + } + + @Test + public void shouldUseExactFlagToConsiderExtraFieldsInEquality_usingPrimitives() { + TypeState1 typeState1 = new TypeState1(); + typeState1.longField = 1L; + typeState1.stringField = "A"; + typeState1.doubleField = 1.0; + typeState1.basicIntField = 1; + typeState1.valueOnlyInTypeState1 = "A"; // Set only in TypeState1; fuzzy matching ignores it, so the records below still compare equal (presumably an "exact" comparison would treat them as unequal — TODO confirm). 
+ + writer1.reset(); + mapper1.writeFlat(typeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + TypeState2 typeState2 = new TypeState2(); + typeState2.longField = 1L; + typeState2.stringField = "A"; + typeState2.doubleField = 1.0; + typeState2.basicIntField = 1; + writer2.reset(); + mapper2.writeFlat(typeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isTrue(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isTrue(); + } + + @Test + public void shouldFindThatRecordsAreNotEqualBecauseMapValuesDiffer() { + TypeState1 typeState1 = new TypeState1(); + typeState1.longField = 1L; + Map map1 = new HashMap<>(); + map1.put("A", new SubValue("A", "AA")); + map1.put("B", new SubValue("B", "BB")); + map1.put("D", new SubValue("D", "DD")); + typeState1.simpleMapField = map1; + + writer1.reset(); + mapper1.writeFlat(typeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + TypeState2 typeState2 = new TypeState2(); + typeState2.longField = 1L; + Map map2 = new HashMap<>(); + map2.put("A", new SubValue("A", "AA")); + map2.put("B", new SubValue("B", "BB")); + map2.put("C", new SubValue("C", "CC")); + typeState2.simpleMapField = map2; + + writer2.reset(); + mapper2.writeFlat(typeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isFalse(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isFalse(); + } + + @Test + public void 
shouldFindSetAndMapFieldsAreEqualEvenIfOrderIsDifferent() { + TypeState1 typeState1 = new TypeState1(); + typeState1.longField = 1L; + typeState1.setOfScalars = new HashSet<>(Arrays.asList("B", "A")); + typeState1.setOfObjects = new HashSet<>(Arrays.asList( + new SubValue("B", "BB"), + new SubValue("C", "CC"), + new SubValue("A", "AA")) + ); + typeState1.simpleMapField = + new HashMap() {{ + put("B", new SubValue("B", "BB")); + put("C", new SubValue("C", "CC")); + put("A", new SubValue("A", "AA")); + }}; + + writer1.reset(); + mapper1.writeFlat(typeState1, writer1); + FlatRecord rec1 = writer1.generateFlatRecord(); + + TypeState2 typeState2 = new TypeState2(); + typeState2.longField = 1L; + typeState2.setOfScalars = new HashSet<>(Arrays.asList("A", "B")); + typeState2.setOfObjects = new HashSet<>(Arrays.asList( + new SubValue("A", "AA"), + new SubValue("B", "BB"), + new SubValue("C", "CC")) + ); + typeState2.simpleMapField = new HashMap() {{ + put("A", new SubValue("A", "AA")); + put("B", new SubValue("B", "BB")); + put("C", new SubValue("C", "CC")); + }}; + + writer2.reset(); + mapper2.writeFlat(typeState2, writer2); + FlatRecord rec2 = writer2.generateFlatRecord(); + FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1); + FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2); + + assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isTrue(); + assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isTrue(); + } + + + + + private FlatRecord createTestFlatRecord2() { + Movie movie = new Movie(); + movie.id = 1; + movie.title = "Movie1"; + movie.releaseYear = 2020; + movie.primaryGenre = "action"; + movie.maturityRating = new MaturityRating("PG", "Some advisory"); + movie.countries = new HashSet<>(); + movie.countries.add(new Country("US")); + movie.countries.add(new Country("CB")); + movie.tags = new HashMap<>(); + movie.tags.put(new Tag("Type"), new 
TagValue("Movie")); + movie.tags.put(new Tag("Genre"), new TagValue("action")); + movie.cast = new HashSet<>(); + movie.cast.add(new CastMember(1, "Benedict Cumberbatch", CastRole.ACTOR)); + movie.cast.add(new CastMember(2, "Martin Freeman", CastRole.ACTOR)); + movie.cast.add(new CastMember(2, "Quentin Tarantino", CastRole.DIRECTOR)); + movie.awardsReceived = new ArrayList<>(); + movie.awardsReceived.add(new Award("Oscar", 2020)); + movie.awardsReceived.add(new Award("Golden Globe", 2025)); + + SimpleHollowDataset dataset = SimpleHollowDataset.fromClassDefinitions(Movie.class); + FakeHollowSchemaIdentifierMapper idMapper = new FakeHollowSchemaIdentifierMapper(dataset); + HollowObjectMapper objMapper = new HollowObjectMapper(HollowWriteStateCreator.createWithSchemas(dataset.getSchemas())); + FlatRecordWriter flatRecordWriter = new FlatRecordWriter(dataset, idMapper); + + flatRecordWriter.reset(); + objMapper.writeFlat(movie, flatRecordWriter); + return flatRecordWriter.generateFlatRecord(); + } + + + @HollowTypeName(name = "TypeState") + @HollowPrimaryKey(fields = "longField") + public static class TypeState1 { + public Long longField; + public String stringField; + @HollowInline + public String inlineStringField; + public String emptyStringField; + public Double doubleField; + public int basicIntField = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow + public SubValue objectField; + public List listOfScalars; + public Set setOfScalars; + public List listOfObjects; + public Set setOfObjects; + public Map simpleMapField; + + // For testing differences between type versions + public String valueOnlyInTypeState1; + public int basicIntFieldOnlyInTypeState1 = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow + } + + public static class SubValue { + public String value; + @HollowInline + public String anotherValue; + + public SubValue(String value) { + this.value = value; + } + + public SubValue(String value, String anotherValue) { + this.value = value; + 
this.anotherValue = anotherValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof SubValue)) return false; + SubValue subValue = (SubValue) o; + return Objects.equals(value, subValue.value) && + Objects.equals(anotherValue, subValue.anotherValue); + } + + @Override + public int hashCode() { + return Objects.hash(value, anotherValue); + } + } + + @HollowTypeName(name = "TypeState") + @HollowPrimaryKey(fields = "longField") + public static class TypeState2 { + public Long longField; + public String stringField; + @HollowInline + public String inlineStringField; + public String emptyStringField; + public Double doubleField; + public int basicIntField = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow + public SubValue objectField; + public List listOfScalars; + public Set setOfScalars; + public List listOfObjects; + public Set setOfObjects; + public Map simpleMapField; + + // For testing differences between type versions + public String valueOnlyInTypeState2; + } + + @HollowTypeName(name = "IntSetTypeState") + @HollowPrimaryKey(fields = "id") + public static class IntSetTypeState1 { + public String id; + public Set intSet; + } + + @HollowTypeName(name = "IntSetTypeState") + @HollowPrimaryKey(fields = "id") + public static class IntSetTypeState2 { + public String id; + public Set intSet; + } + + @HollowTypeName(name = "IntTypeState") + @HollowPrimaryKey(fields = "intA") + public static class IntTypeState1 { + public int intA = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow; + public int intB = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow; + } + + @HollowTypeName(name = "IntTypeState") + @HollowPrimaryKey(fields = "intA") + public static class IntTypeState2 { + public int intA = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow; + public int intB = Integer.MIN_VALUE; // MIN_VALUE == null in Hollow; + } + + @HollowTypeName(name = "RecordWithSubObject") + @HollowPrimaryKey(fields = "id") + public static 
class RecordWithSubObject1 { + public String id; + public int intField; + public RecordSubObject subObject; + } + + @HollowTypeName(name = "RecordWithSubObject") + @HollowPrimaryKey(fields = "id") + public static class RecordWithSubObject2 { + public String id; + public int intField; + } + + @HollowTypeName(name = "RecordSubObject") + public static class RecordSubObject { + public String stringField; + public int intField; + } + +} \ No newline at end of file diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/Award.java b/hollow/src/test/java/com/netflix/hollow/test/dto/Award.java new file mode 100644 index 0000000000..7056a1e856 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/Award.java @@ -0,0 +1,14 @@ +package com.netflix.hollow.test.dto; + +import com.netflix.hollow.core.write.objectmapper.HollowTypeName; + +public class Award { + @HollowTypeName(name = "TestAwardName") + public String name; + public int year; + + public Award(String name, int year) { + this.name = name; + this.year = year; + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/CastMember.java b/hollow/src/test/java/com/netflix/hollow/test/dto/CastMember.java new file mode 100644 index 0000000000..876292e969 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/CastMember.java @@ -0,0 +1,15 @@ +package com.netflix.hollow.test.dto; + +import com.netflix.hollow.core.write.objectmapper.HollowInline; + +public class CastMember { + public int id; + @HollowInline String name; + public CastRole role; + + public CastMember(int id, String name, CastRole role) { + this.id = id; + this.name = name; + this.role = role; + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/CastRole.java b/hollow/src/test/java/com/netflix/hollow/test/dto/CastRole.java new file mode 100644 index 0000000000..884070fa58 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/CastRole.java @@ -0,0 +1,8 @@ +package 
com.netflix.hollow.test.dto; + +public enum CastRole { + DIRECTOR, + ACTOR, + WRITER, + PRODUCER +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/Country.java b/hollow/src/test/java/com/netflix/hollow/test/dto/Country.java new file mode 100644 index 0000000000..78a6542c73 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/Country.java @@ -0,0 +1,12 @@ +package com.netflix.hollow.test.dto; + + +import com.netflix.hollow.core.write.objectmapper.HollowInline; + +public class Country { + @HollowInline String value; + + public Country(String value) { + this.value = value; + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/MaturityRating.java b/hollow/src/test/java/com/netflix/hollow/test/dto/MaturityRating.java new file mode 100644 index 0000000000..f2e6760d0b --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/MaturityRating.java @@ -0,0 +1,23 @@ +package com.netflix.hollow.test.dto; + +import com.netflix.hollow.core.write.objectmapper.HollowTypeName; + +public class MaturityRating { + @HollowTypeName(name = "TestMaturityRatingName") + public String rating; + @HollowTypeName(name = "TestMaturityAdvisoryName") + public String advisory; + + public MaturityRating(String rating, String advisory) { + this.rating = rating; + this.advisory = advisory; + } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof MaturityRating)) { + return false; + } + return ((MaturityRating)obj).rating.equals(rating) && ((MaturityRating)obj).advisory.equals(advisory); + } + +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/Movie.java b/hollow/src/test/java/com/netflix/hollow/test/dto/Movie.java new file mode 100644 index 0000000000..ec0d609653 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/Movie.java @@ -0,0 +1,31 @@ +package com.netflix.hollow.test.dto; + + +import com.netflix.hollow.core.write.objectmapper.HollowHashKey; +import 
com.netflix.hollow.core.write.objectmapper.HollowPrimaryKey; +import com.netflix.hollow.core.write.objectmapper.HollowTypeName; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +@HollowPrimaryKey(fields = {"id"}) +public class Movie { + public int id; + + @HollowTypeName(name = "MovieTitle") + public String title; + public int releaseYear; + @HollowTypeName(name = "MovieGenre") + public String primaryGenre; + public MaturityRating maturityRating; + + public Set countries; + @HollowHashKey(fields = {"value"}) + public Map tags; + + @HollowHashKey(fields = {"id"}) + public Set cast; + + public List awardsReceived; +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/Tag.java b/hollow/src/test/java/com/netflix/hollow/test/dto/Tag.java new file mode 100644 index 0000000000..d9cfa46619 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/Tag.java @@ -0,0 +1,11 @@ +package com.netflix.hollow.test.dto; + +import com.netflix.hollow.core.write.objectmapper.HollowInline; + +public class Tag { + @HollowInline String value; + + public Tag(String value) { + this.value = value; + } +} diff --git a/hollow/src/test/java/com/netflix/hollow/test/dto/TagValue.java b/hollow/src/test/java/com/netflix/hollow/test/dto/TagValue.java new file mode 100644 index 0000000000..a741edf558 --- /dev/null +++ b/hollow/src/test/java/com/netflix/hollow/test/dto/TagValue.java @@ -0,0 +1,13 @@ +package com.netflix.hollow.test.dto; + +import com.netflix.hollow.core.write.objectmapper.HollowInline; + +public class TagValue { + @HollowInline public String value; + + public TagValue(String value) { + this.value = value; + } +} + +