#3129 Fix to verify DBJson dirtiness using same field ordering on both sides (due to PostgreSQL reordering fields for JsonB types) #3405

Open · wants to merge 1 commit into master
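Background on the fix (not part of the diff): PostgreSQL stores jsonb values in a decomposed form that does not preserve object key order, so the JSON read back from the database can differ textually from the JSON that was written even though the content is identical. A dirty check that compares raw JSON strings therefore reports a false positive. The changes below build the mutable-value hash from the parsed Object and run both sides through the same scalarType.format(value) step, so old and new content are compared in one canonical form. The following minimal Java sketch only illustrates that idea; the class and its format helper are hypothetical and are not Ebean code, and it simply assumes a formatter with a stable key order:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class JsonbDirtyCheckSketch {

  // Hypothetical stand-in for the scalar type's canonical formatting
  // (scalarType.format(value) in the diff): renders keys in a stable order.
  static String format(Map<String, Object> value) {
    return new TreeMap<>(value).toString();
  }

  public static void main(String[] args) {
    // content as written by the application (insertion order: b, a)
    Map<String, Object> written = new LinkedHashMap<>();
    written.put("b", 2);
    written.put("a", 1);

    // the same content as PostgreSQL jsonb may return it (keys reordered)
    Map<String, Object> readBack = new LinkedHashMap<>();
    readBack.put("a", 1);
    readBack.put("b", 2);

    // comparing the raw renderings reports a difference: a false "dirty"
    System.out.println(written.toString().equals(readBack.toString())); // false

    // comparing the canonical form of both sides reports no change
    System.out.println(format(written).equals(format(readBack)));       // true
  }
}

The first comparison mimics checking the stored JSON text against what jsonb hands back; the second mimics the canonical comparison the PR switches to.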
@@ -744,9 +744,9 @@ public Object parse(String value) {
   }
 
   /**
-   * creates a mutableHash for the given JSON value.
+   * creates a mutableHash for the given Object value.
    */
-  public MutableValueInfo createMutableInfo(String json) {
+  public MutableValueInfo createMutableInfo(Object value) {
     throw new UnsupportedOperationException();
   }
 
@@ -39,22 +39,22 @@ public BeanProperty override(BeanPropertyOverride override) {
   }
 
   @Override
-  public MutableValueInfo createMutableInfo(String json) {
+  public MutableValueInfo createMutableInfo(Object value) {
     if (sourceDetection) {
-      return new SourceMutableValue(scalarType, json);
+      return new SourceMutableValue(scalarType, value);
     } else {
-      return new ChecksumMutableValue(scalarType, json);
+      return new ChecksumMutableValue(scalarType, value);
     }
   }
 
   /**
    * Next when no prior MutableValueInfo.
    */
-  private MutableValueNext next(String json) {
+  private MutableValueNext next(String json, Object value) {
     if (sourceDetection) {
-      return new SourceMutableValue(scalarType, json);
+      return new SourceMutableValue(scalarType, value);
     } else {
-      return new NextPair(json, new ChecksumMutableValue(scalarType, json));
+      return new NextPair(json, new ChecksumMutableValue(scalarType, value));
     }
   }
 
@@ -66,12 +66,13 @@ boolean checkMutable(Object value, boolean alreadyDirty, EntityBeanIntercept ebi
     // mutation detection based on json content or checksum of json content
     // only perform serialisation to json once
     final String json = scalarType.format(value);
+
     final MutableValueInfo oldHash = ebi.mutableInfo(propertyIndex);
     if (oldHash == null) {
       if (value == null) {
         return false; // no change, still null
       }
-      ebi.mutableNext(propertyIndex, next(json));
+      ebi.mutableNext(propertyIndex, next(json, value));
       return true;
     }
     // only perform compute of checksum/hash once (if checksum based)
@@ -92,9 +93,8 @@ public Object readSet(DataReader reader, EntityBean bean) throws SQLException {
     }
     if (bean != null) {
       setValue(bean, value);
-      String json = reader.popJson();
-      if (json != null) {
-        final MutableValueInfo hash = createMutableInfo(json);
+      if (value != null) {
+        final MutableValueInfo hash = createMutableInfo(value);
         bean._ebean_getIntercept().mutableInfo(propertyIndex, hash);
       }
     }
@@ -110,10 +110,10 @@ public Object readSet(DataReader reader, EntityBean bean) throws SQLException {
   public void setCacheDataValue(EntityBean bean, Object cacheData, PersistenceContext context) {
     if (cacheData instanceof String) {
       // parse back from string to support optimisation of java object serialisation
-      final String jsonContent = (String) cacheData;
-      final MutableValueInfo hash = createMutableInfo(jsonContent);
+      cacheData = scalarType.parse((String) cacheData);
+      final MutableValueInfo hash = createMutableInfo(cacheData);
       bean._ebean_getIntercept().mutableInfo(propertyIndex, hash);
-      cacheData = scalarType.parse(jsonContent);
+
     }
     setValue(bean, cacheData);
   }
@@ -150,9 +150,9 @@ private static final class ChecksumMutableValue implements MutableValueInfo {
     private final ScalarType<?> parent;
     private final long checksum;
 
-    ChecksumMutableValue(ScalarType<?> parent, String json) {
+    ChecksumMutableValue(ScalarType<?> parent, Object value) {
       this.parent = parent;
-      this.checksum = Checksum.checksum(json);
+      this.checksum = Checksum.checksum(parent.format(value));
     }
 
     /**
@@ -185,33 +185,33 @@ public Object get() {
    */
   private static final class SourceMutableValue implements MutableValueInfo, MutableValueNext {
 
-    private final String originalJson;
+    private final String formattedJson;
     private final ScalarType<?> parent;
 
-    SourceMutableValue(ScalarType<?> parent, String json) {
+    SourceMutableValue(ScalarType<?> parent, Object value) {
       this.parent = parent;
-      this.originalJson = json;
+      this.formattedJson = parent.format(value);
     }
 
     @Override
     public MutableValueNext nextDirty(String json) {
-      return Objects.equals(originalJson, json) ? null : new SourceMutableValue(parent, json);
+      return Objects.equals(formattedJson, json) ? null : new SourceMutableValue(parent, parent.parse(json));
     }
 
     @Override
     public boolean isEqualToObject(Object obj) {
-      return Objects.equals(originalJson, parent.format(obj));
+      return Objects.equals(formattedJson, parent.format(obj));
     }
 
     @Override
     public Object get() {
       // rebuild the 'oldValue' for change log etc
-      return parent.parse(originalJson);
+      return parent.parse(formattedJson);
     }
 
     @Override
     public String content() {
-      return originalJson;
+      return formattedJson;
     }
 
     @Override
@@ -29,7 +29,7 @@ public void dmlBind(BindableRequest request, EntityBean bean) throws SQLException
     } else {
       // on insert store hash and push json
       final String json = prop.format(value);
-      final MutableValueInfo hash = prop.createMutableInfo(json);
+      final MutableValueInfo hash = prop.createMutableInfo(value);
       bean._ebean_getIntercept().mutableInfo(propertyIndex, hash);
       request.pushJson(json);
       request.bind(value, prop);
@@ -190,6 +190,9 @@ public final void jsonWrite(JsonGenerator writer, Set value) throws IOException
 
   @SuppressWarnings("unchecked")
   private Set convertList(List list) {
+    if (list == null) {
+      return null;
+    }
     return new LinkedHashSet(list);
   }
 }