Rebased and brought in fixes from Steve
Sunjeet committed Jul 19, 2023
1 parent 4b9e7fc commit dd183c9
Showing 9 changed files with 369 additions and 133 deletions.
@@ -63,6 +63,26 @@ public static int fieldHashCode(HollowObjectTypeDataAccess typeAccess, int ordin
throw new IllegalStateException("I don't know how to hash a " + schema.getFieldType(fieldPosition));
}

public static int hashObject(Object value) {
if(value instanceof Integer) {
return HollowReadFieldUtils.intHashCode((Integer)value);
} else if(value instanceof String) {
return HollowReadFieldUtils.stringHashCode((String)value);
} else if(value instanceof Float) {
return HollowReadFieldUtils.floatHashCode((Float)value);
} else if(value instanceof Double) {
return HollowReadFieldUtils.doubleHashCode((Double)value);
} else if(value instanceof Boolean) {
return HollowReadFieldUtils.booleanHashCode((Boolean) value);
} else if(value instanceof Long) {
return HollowReadFieldUtils.longHashCode((Long) value);
} else if(value instanceof byte[]) {
return HollowReadFieldUtils.byteArrayHashCode((byte[]) value);
} else {
throw new RuntimeException("Unable to hash field of type " + value.getClass().getName());
}
}
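
The new static hashObject centralizes the boxed-value hashing that previously lived as a private helper in HollowHistoryTypeKeyIndex (removed further down in this commit), so key indexing and field hashing stay consistent. A minimal standalone sketch of the same dispatch shape, with JDK hash functions standing in for the Hollow-specific ones:

import java.util.Arrays;

// Illustrative stand-in for the dispatch above; the real method delegates to
// HollowReadFieldUtils' type-specific hash functions rather than JDK hashCode.
final class HashDispatchSketch {
    static int hash(Object value) {
        if (value instanceof Integer) return (Integer) value;               // an int hashes to itself
        if (value instanceof String)  return value.hashCode();
        if (value instanceof Float)   return Float.hashCode((Float) value);
        if (value instanceof Double)  return Double.hashCode((Double) value);
        if (value instanceof Boolean) return Boolean.hashCode((Boolean) value);
        if (value instanceof Long)    return Long.hashCode((Long) value);
        if (value instanceof byte[])  return Arrays.hashCode((byte[]) value);
        throw new IllegalArgumentException("Unable to hash " + value.getClass().getName());
    }
}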

/**
* Determine whether two OBJECT field records are exactly equal.
*
@@ -42,7 +42,7 @@
@SuppressWarnings("restriction")
public class HollowObjectTypeMapper extends HollowTypeMapper {

private static Set<Class<?>> BOXED_WRAPPERS = new HashSet<>(Arrays.asList(Boolean.class, Integer.class, Short.class, Byte.class, Character.class, Long.class, Float.class, Double.class, String.class, byte[].class, Date.class));
private static Set<Class<?>> BOXED_WRAPPERS = new HashSet<>(Arrays.asList(Boolean.class, Integer.class, Short.class, Byte.class, Character.class, Long.class, Float.class, Double.class, String.class, byte[].class));

private static final Unsafe unsafe = HollowUnsafeHandle.getUnsafe();
private final HollowObjectMapper parentMapper;
@@ -195,7 +195,7 @@ protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader rea
HollowObjectSchema recordObjectSchema = (HollowObjectSchema) recordSchema;

Object obj = null;
if (BOXED_WRAPPERS.contains(clazz) || clazz.isEnum()) {
if (BOXED_WRAPPERS.contains(clazz)) {
// if `clazz` is a BoxedWrapper then by definition its OBJECT schema will have a single primitive
// field so find it in the FlatRecord and ignore all other fields.
for (int i = 0; i < recordObjectSchema.numFields(); i++) {
@@ -570,20 +570,6 @@ private Object parseBoxedWrapper(FlatRecordReader reader) {
case BYTES: {
return reader.readBytes();
}
case ENUM_NAME: {
String enumName = reader.readString();
if (enumName != null) {
return Enum.valueOf((Class<Enum>) clazz, enumName);
}
break;
}
case DATE_TIME: {
long value = reader.readLong();
if (value != Long.MIN_VALUE) {
return new Date(value);
}
break;
}
}
return null;
}
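
With the ENUM_NAME and DATE_TIME cases removed, parseBoxedWrapper handles only true primitive wrappers, whose OBJECT schema carries exactly one meaningful field. A simplified sketch of that single-field scan; the reader callback is an illustrative stand-in, not Hollow's flat-record API:

import java.util.function.IntFunction;

final class SingleFieldScanSketch {
    // Scan schema positions until the one populated field is found; all other
    // positions are skipped, mirroring the wrapper-parsing loop above.
    static Object firstPopulatedField(IntFunction<Object> readFieldAt, int numFields) {
        for (int pos = 0; pos < numFields; pos++) {
            Object value = readFieldAt.apply(pos); // null when the field is absent
            if (value != null) return value;
        }
        return null;
    }
}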
@@ -51,7 +51,7 @@ public FlatRecordExtractor(HollowReadStateEngine extractFrom, HollowSchemaIdenti
this.recordCopiersByType = new HashMap<>();
}

public synchronized FlatRecord extract(String type, int ordinal) {
public FlatRecord extract(String type, int ordinal) {
ordinalRemapper.clear();
writer.reset();

@@ -64,16 +64,7 @@ public void addTypeIndex(PrimaryKey primaryKey) {
}

public HollowHistoryTypeKeyIndex addTypeIndex(PrimaryKey primaryKey, HollowDataset dataModel) {
HollowHistoryTypeKeyIndex prevKeyIdx = typeKeyIndexes.get(primaryKey.getType());
HollowHistoryTypeKeyIndex keyIdx = new HollowHistoryTypeKeyIndex(primaryKey, dataModel);
// retain any previous indexed fields
if (prevKeyIdx != null) {
for (int i = 0; i < prevKeyIdx.getKeyFields().length; i++) {
if (prevKeyIdx.getKeyFieldIsIndexed()[i]) {
keyIdx.addFieldIndex(prevKeyIdx.getKeyFields()[i], dataModel);
}
}
}
typeKeyIndexes.put(primaryKey.getType(), keyIdx);
return keyIdx;
}
@@ -78,10 +78,6 @@ public String[] getKeyFields() {
return primaryKey.getFieldPaths();
}

public boolean[] getKeyFieldIsIndexed() {
return keyFieldIsIndexed;
}

public void addFieldIndex(String fieldName, HollowDataset dataModel) {
String[] fieldPathParts = PrimaryKey.getCompleteFieldPathParts(dataModel, primaryKey.getType(), fieldName);
for (int i = 0; i < primaryKey.numFields(); i++) {
@@ -97,20 +93,18 @@ public void initializeKeySchema(HollowObjectTypeReadState initialTypeState) {
if (isInitialized) return;
HollowObjectSchema schema = initialTypeState.getSchema();

for (int i= 0; i < keyFieldNames.length; i ++) {
String[] keyFieldPart = keyFieldNames[i];
fieldTypes[i] = addSchemaField(schema, keyFieldPart, 0);
}
for (String[] keyFieldPart : keyFieldNames) addSchemaField(schema, keyFieldPart, 0);
isInitialized = true;
}

private FieldType addSchemaField(HollowObjectSchema schema, String[] keyFieldNames, int keyFieldPartPosition) {
private void addSchemaField(HollowObjectSchema schema, String[] keyFieldNames, int keyFieldPartPosition) {
int schemaPosition = schema.getPosition(keyFieldNames[keyFieldPartPosition]);
if (keyFieldPartPosition < keyFieldNames.length - 1) {
HollowObjectSchema nextPartSchema = (HollowObjectSchema) schema.getReferencedTypeState(schemaPosition).getSchema();
return addSchemaField(nextPartSchema, keyFieldNames, keyFieldPartPosition + 1);
addSchemaField(nextPartSchema, keyFieldNames, keyFieldPartPosition + 1);
} else {
fieldTypes[keyFieldPartPosition] = schema.getFieldType(schemaPosition);
}
return schema.getFieldType(schemaPosition);
}
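
addSchemaField now records the resolved leaf FieldType as a side effect instead of returning it. The recursion itself is a plain path walk through nested OBJECT schemas; a standalone sketch with maps standing in for HollowObjectSchema and a type-name string for FieldType:

import java.util.Map;

final class PathWalkSketch {
    // Follow each path segment into the referenced type's schema; at the final
    // segment the stored value is the leaf field's type.
    static String resolveLeafType(Map<String, Object> schema, String[] path, int pos) {
        Object node = schema.get(path[pos]);
        if (pos < path.length - 1) {
            @SuppressWarnings("unchecked")
            Map<String, Object> nested = (Map<String, Object>) node;
            return resolveLeafType(nested, path, pos + 1);
        }
        return (String) node;
    }

    public static void main(String[] args) {
        Map<String, Object> schema = Map.of("movie", Map.of("id", "LONG"));
        System.out.println(resolveLeafType(schema, new String[] {"movie", "id"}, 0)); // prints LONG
    }
}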

private void initializeKeyParts(HollowDataset dataModel) {
@@ -158,13 +152,13 @@ private void populateAllCurrentRecordKeysIntoIndex(HollowObjectTypeReadState typ
}

private void writeKeyObject(HollowObjectTypeReadState typeState, int ordinal, boolean isDelta) {
int assignedOrdinal = isDelta ? maxIndexedOrdinal : ordinal;
maxIndexedOrdinal+=1;
int assignedOrdinal = maxIndexedOrdinal;
int assignedIndex = ordinalMapping.storeNewRecord(typeState, ordinal, assignedOrdinal);

// Identical record already in memory, no need to store fields
if(assignedIndex==ORDINAL_NONE)
return;
maxIndexedOrdinal+=1;

for (int i = 0; i < primaryKey.numFields(); i++)
writeKeyField(assignedOrdinal, i);
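
writeKeyObject now assigns its own monotonically increasing ordinal rather than reusing the type state's ordinal, and skips the field writes when storeNewRecord reports (via ORDINAL_NONE) that an identical record is already stored. A standalone sketch of that dedupe-then-store pattern, with a string record and a HashMap standing in for HollowOrdinalMapper:

import java.util.HashMap;
import java.util.Map;

final class DedupStoreSketch {
    private final Map<String, Integer> ordinalOfRecord = new HashMap<>();
    private int maxIndexedOrdinal = 0;

    // Identical records reuse their first ordinal and trigger no extra storage.
    int store(String record) {
        Integer existing = ordinalOfRecord.get(record);
        if (existing != null) return existing;
        int assignedOrdinal = maxIndexedOrdinal++;
        ordinalOfRecord.put(record, assignedOrdinal);
        return assignedOrdinal;
    }
}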
@@ -175,35 +169,14 @@ private void writeKeyField(int assignedOrdinal, int fieldIdx) {
return;

Object fieldObject = ordinalMapping.getFieldObject(assignedOrdinal, fieldIdx);
int fieldHash = HashCodes.hashInt(hashObject(fieldObject));
int fieldHash = HashCodes.hashInt(HollowReadFieldUtils.hashObject(fieldObject));
if(!ordinalFieldHashMapping.containsKey(fieldHash))
ordinalFieldHashMapping.put(fieldHash, new IntList());

IntList matchingFieldList = ordinalFieldHashMapping.get(fieldHash);
matchingFieldList.add(assignedOrdinal);
}
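
writeKeyField buckets each assigned ordinal under the hash of its field value; queryIndexedFields later walks the bucket for a candidate hash. A minimal sketch of that multimap shape, with ArrayList<Integer> standing in for Hollow's IntList:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class FieldHashIndexSketch {
    private final Map<Integer, List<Integer>> ordinalsByFieldHash = new HashMap<>();

    void index(int fieldHash, int assignedOrdinal) {
        ordinalsByFieldHash.computeIfAbsent(fieldHash, h -> new ArrayList<>())
                           .add(assignedOrdinal);
    }

    List<Integer> candidates(int fieldHash) {
        return ordinalsByFieldHash.getOrDefault(fieldHash, List.of());
    }
}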

// Retain consistency with rest of Hollow Hashing when hashing boxed primitives
private int hashObject(Object value) {
if(value instanceof Integer) {
return HollowReadFieldUtils.intHashCode((Integer)value);
} else if(value instanceof String) {
return HollowReadFieldUtils.stringHashCode((String)value);
} else if(value instanceof Float) {
return HollowReadFieldUtils.floatHashCode((Float)value);
} else if(value instanceof Double) {
return HollowReadFieldUtils.doubleHashCode((Double)value);
} else if(value instanceof Boolean) {
return HollowReadFieldUtils.booleanHashCode((Boolean) value);
} else if(value instanceof Long) {
return HollowReadFieldUtils.longHashCode((Long) value);
} else if(value instanceof byte[]) {
return HollowReadFieldUtils.byteArrayHashCode((byte[]) value);
} else {
throw new RuntimeException("Unable to hash field of type " + value.getClass().getName());
}
}

public String getKeyDisplayString(int keyOrdinal) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < primaryKey.numFields(); i++) {
@@ -224,40 +197,54 @@ public IntList queryIndexedFields(final String query) {

for (int i = 0; i < primaryKey.numFields(); i++) {
int hashCode = 0;
Object objectToFind = null;
try {
switch (fieldTypes[i]) {
case INT:
final int queryInt = Integer.parseInt(query);
hashCode = HollowReadFieldUtils.intHashCode(queryInt);
objectToFind = queryInt;
break;
case LONG:
final long queryLong = Long.parseLong(query);
hashCode = HollowReadFieldUtils.longHashCode(queryLong);
objectToFind = queryLong;
break;
case STRING:
hashCode = HashCodes.hashCode(query);
objectToFind = query;
break;
case DOUBLE:
final double queryDouble = Double.parseDouble(query);
hashCode = HollowReadFieldUtils.doubleHashCode(queryDouble);
objectToFind = queryDouble;
break;
case FLOAT:
final float queryFloat = Float.parseFloat(query);
hashCode = HollowReadFieldUtils.floatHashCode(queryFloat);
objectToFind = queryFloat;
break;
default:
}
addMatches(HashCodes.hashInt(hashCode), matchingKeys);
addMatches(HashCodes.hashInt(hashCode), objectToFind, i, matchingKeys);
} catch(NumberFormatException ignore) {}
}
return matchingKeys;
}
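
queryIndexedFields tries the raw query string against every key field, parsing it according to the field's type; combinations that do not parse are simply skipped through the NumberFormatException catch. A hedged usage sketch (index construction and the IntList import are elided):

// Assumption: keyIndex is an initialized HollowHistoryTypeKeyIndex. The same
// free-text query is matched against INT, LONG, STRING, DOUBLE and FLOAT key
// fields, and only the fields where it parses contribute candidates.
IntList matches = keyIndex.queryIndexedFields("8797182");
for (int i = 0; i < matches.size(); i++)
    System.out.println(keyIndex.getKeyDisplayString(matches.get(i)));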

public void addMatches(int hashCode, IntList results) {
public void addMatches(int hashCode, Object objectToMatch, int field, IntList results) {
if (!ordinalFieldHashMapping.containsKey(hashCode))
return;
IntList res2 = ordinalFieldHashMapping.get(hashCode);
results.addAll(res2);

IntList matchingOrdinals = ordinalFieldHashMapping.get(hashCode);
for(int i=0;i<matchingOrdinals.size();i++) {
int ordinal = matchingOrdinals.get(i);

Object matchingObject = ordinalMapping.getFieldObject(ordinal, field);
if(objectToMatch.equals(matchingObject)) {
results.add(ordinal);
}
}
}
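
This is the substantive fix in addMatches: a hash bucket may contain ordinals whose field values merely collide on hash, so each candidate's stored value is now re-checked with equals before being returned. A standalone sketch of the confirmation step; fieldValueOf stands in for ordinalMapping.getFieldObject(ordinal, field):

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.IntFunction;

final class ConfirmMatchesSketch {
    static List<Integer> confirm(List<Integer> bucket, Object query,
                                 IntFunction<Object> fieldValueOf) {
        List<Integer> results = new ArrayList<>();
        for (int ordinal : bucket) {
            if (Objects.equals(query, fieldValueOf.apply(ordinal)))
                results.add(ordinal); // keep true matches, drop hash collisions
        }
        return results;
    }
}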

public Object getKeyFieldValue(int keyFieldIdx, int keyOrdinal) {
@@ -16,16 +16,17 @@
*/
package com.netflix.hollow.tools.history.keyindex;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.tools.util.ObjectInternPool;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class HollowOrdinalMapper {
private int size = 0;
@@ -142,6 +143,17 @@ private void expandAndRehashTable() {

Object[] fieldObjects = indexFieldObjectMapping.get(i);
newIndexFieldObjectMapping.put(newIndex, fieldObjects);

// Store new index in old table so we can remap assignedOrdinalToIndex
ordinalMappings[i]=newIndex;
}

for (Map.Entry<Integer, Integer> entry : assignedOrdinalToIndex.entrySet()) {
int assignedOrdinal = entry.getKey();
int previousIndex = entry.getValue();
int newIndex = ordinalMappings[previousIndex];

assignedOrdinalToIndex.put(assignedOrdinal, newIndex);
}

this.ordinalMappings = newTable;
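
expandAndRehashTable doubles the open-addressed table and, while rehashing, writes each entry's new slot back into the old table so assignedOrdinalToIndex can be rewritten in one pass. A standalone sketch of that resize-and-remap trick; the real code hashes record contents, so the ordinal-based hash here is only illustrative:

import java.util.Arrays;
import java.util.Map;

final class ProbingResizeSketch {
    static final int ORDINAL_NONE = -1;

    static int[] resize(int[] table, Map<Integer, Integer> ordinalToSlot) {
        int[] bigger = new int[table.length * 2];
        Arrays.fill(bigger, ORDINAL_NONE);
        int[] newSlotOfOldSlot = new int[table.length];

        for (int slot = 0; slot < table.length; slot++) {
            int ordinal = table[slot];
            if (ordinal == ORDINAL_NONE) continue;
            int idx = Math.floorMod(Integer.hashCode(ordinal), bigger.length);
            while (bigger[idx] != ORDINAL_NONE)
                idx = (idx + 1) % bigger.length;  // linear probe to a free slot
            bigger[idx] = ordinal;
            newSlotOfOldSlot[slot] = idx;         // remember the remapping
        }
        for (Map.Entry<Integer, Integer> e : ordinalToSlot.entrySet())
            e.setValue(newSlotOfOldSlot[e.getValue()]); // one-pass rewrite
        return bigger;
    }
}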
@@ -154,31 +166,20 @@ private int rehashExistingRecord(Integer[] newTable, int originalHash, int assig
while (newTable[newIndex]!=ORDINAL_NONE)
newIndex = (newIndex + 1) % newTable.length;

assignedOrdinalToIndex.put(assignedOrdinal, newIndex);
newTable[newIndex] = assignedOrdinal;
return newIndex;
}

public Object getFieldObject(int keyOrdinal, int fieldIndex) {
int index = assignedOrdinalToIndex.get(keyOrdinal);
public Object getFieldObject(int assignedOrdinal, int fieldIndex) {
int index = assignedOrdinalToIndex.get(assignedOrdinal);
return indexFieldObjectMapping.get(index)[fieldIndex];
}

private int hashKeyRecord(HollowObjectTypeReadState typeState, int ordinal) {
int hashCode = 0;

for (int i = 0; i < primaryKey.numFields(); i++) {

int lastFieldPath = keyFieldIndices[i].length - 1;
int fieldOrdinal = ordinal;
HollowObjectTypeReadState fieldTypeState = typeState;
for (int f = 0; f < lastFieldPath; f++) {
int fieldPosition = keyFieldIndices[i][f];
fieldOrdinal = fieldTypeState.readOrdinal(fieldOrdinal, fieldPosition);
fieldTypeState = (HollowObjectTypeReadState) fieldTypeState.getSchema().getReferencedTypeState(fieldPosition);
}

int fieldHashCode = HollowReadFieldUtils.fieldHashCode(fieldTypeState, fieldOrdinal, keyFieldIndices[i][lastFieldPath]);
Object fieldObjectToHash = readValueInState(typeState, ordinal, i);
int fieldHashCode = HollowReadFieldUtils.hashObject(fieldObjectToHash);
hashCode = (hashCode * 31) ^ fieldHashCode;
}
return HashCodes.hashInt(hashCode);
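
hashKeyRecord now reads each key field as an object and hashes it through the shared HollowReadFieldUtils.hashObject, folding the per-field hashes together. A short sketch of the combining step, with Object.hashCode standing in for hashObject and the final HashCodes.hashInt mix omitted:

final class KeyHashSketch {
    static int combine(Object[] keyFieldValues) {
        int hash = 0;
        for (Object value : keyFieldValues)
            hash = (hash * 31) ^ value.hashCode(); // same fold as above
        return hash;
    }
}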
@@ -5,7 +5,6 @@
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -27,28 +26,11 @@ public void setUp() {
mapper.initializeTypeState(InternalTypeA.class);
mapper.initializeTypeState(TypeWithCollections.class);
mapper.initializeTypeState(VersionedType2.class);
mapper.initializeTypeState(SpecialWrapperTypesTest.class);

flatRecordWriter = new FlatRecordWriter(
mapper.getStateEngine(), new FakeHollowSchemaIdentifierMapper(mapper.getStateEngine()));
}

@Test
public void testSpecialWrapperTypes() {
SpecialWrapperTypesTest wrapperTypesTest = new SpecialWrapperTypesTest();
wrapperTypesTest.id = 8797182L;
wrapperTypesTest.type = AnEnum.SOME_VALUE_C;
wrapperTypesTest.dateCreated = new Date();

flatRecordWriter.reset();
mapper.writeFlat(wrapperTypesTest, flatRecordWriter);
FlatRecord fr = flatRecordWriter.generateFlatRecord();

SpecialWrapperTypesTest result = mapper.readFlat(fr);

Assert.assertEquals(wrapperTypesTest, result);
}

@Test
public void testSimpleTypes() {
TypeWithAllSimpleTypes typeWithAllSimpleTypes = new TypeWithAllSimpleTypes();
@@ -551,37 +533,4 @@ public static class SubValue {
@HollowInline
public String anotherValue;
}

enum AnEnum {
SOME_VALUE_A,
SOME_VALUE_B,
SOME_VALUE_C,
}

@HollowTypeName(name = "SpecialWrapperTypesTest")
@HollowPrimaryKey(fields = {"id"})
static class SpecialWrapperTypesTest {
long id;
@HollowTypeName(name = "AnEnum")
AnEnum type;
Date dateCreated;

@Override
public boolean equals(Object o) {
if(o instanceof SpecialWrapperTypesTest) {
SpecialWrapperTypesTest other = (SpecialWrapperTypesTest)o;
return id == other.id && type == other.type && dateCreated.equals(other.dateCreated);
}
return false;
}

@Override
public String toString() {
return "SpecialWrapperTypesTest{" +
"id=" + id +
", type='" + type + '\'' +
", dateCreated=" + dateCreated +
'}';
}
}
}
