Added batch() to perform mutations as a single batch when a unit of work commits. Lots of fixes.
parent a79e7dacf1
commit eb22e3c72e
37 changed files with 818 additions and 291 deletions
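A rough usage sketch of the new batching flow (not taken verbatim from this commit: the Widget pojos and the session.begin() call are assumptions; upsert(...).batch(uow) and uow.commit() are the calls added below):

    UnitOfWork uow = session.begin();      // assumption: the session hands out the unit of work
    session.upsert(widget1).batch(uow);    // stage the mutation inside the unit of work
    session.upsert(widget2).batch(uow);    // nothing is sent to Cassandra yet
    uow.commit();                          // a top-level commit flushes the staged operations as one BatchStatement;
                                           // a nested unit of work hands its operations up to its parent instead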
9
NOTES
@@ -183,3 +183,12 @@ InsertOperation
if (resultType == iface) {
if (values.size() > 0 && includesNonIdentityValues) {
boolean immutable = iface.isAssignableFrom(Drafted.class);
-------------------

final Object value;
if (method.getParameterCount() == 1 && args[0] instanceof Boolean && src instanceof ValueProviderMap) {
  value = ((ValueProviderMap)src).get(methodName, (Boolean)args[0]);
} else {
  value = src.get(methodName);
}
--------------------
@@ -77,6 +77,10 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
    return value;
  }

  public void put(String key, Object value) {
    backingMap.put(key, value);
  }

  @SuppressWarnings("unchecked")
  public <T> T mutate(Getter<T> getter, T value) {
    return (T) mutate(this.<T>methodNameFor(getter), value);
@@ -104,8 +104,7 @@ public abstract class AbstractSessionOperations {
    return executeAsync(statement, uow, null, showValues);
  }

  public ResultSetFuture executeAsync(
      Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
  public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
    try {
      logStatement(statement, showValues);
      return currentSession().executeAsync(statement);
@@ -17,6 +17,7 @@ package net.helenus.core;

import static net.helenus.core.HelenusSession.deleted;

import com.datastax.driver.core.BatchStatement;
import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;

@@ -27,6 +28,7 @@ import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.operation.AbstractOperation;
import net.helenus.support.Either;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -53,6 +55,8 @@ public abstract class AbstractUnitOfWork<E extends Exception>
  private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
  private boolean aborted = false;
  private boolean committed = false;
  private long committedAt = 0L;
  private List<AbstractOperation<?, ?>> operations = new ArrayList<AbstractOperation<?, ?>>();

  protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
    Objects.requireNonNull(session, "containing session cannot be null");

@@ -256,11 +260,17 @@ public abstract class AbstractUnitOfWork<E extends Exception>
    String tableName = CacheUtil.schemaName(facets);
    for (Facet facet : facets) {
      if (!facet.fixed()) {
        if (facet.alone()) {
          String columnName = facet.name() + "==" + facet.value();
          cache.put(tableName, columnName, Either.left(value));
        }
      }
    }
  }

  public void batch(AbstractOperation s) {
    operations.add(s);
  }

  private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
    return nested.iterator();

@@ -273,6 +283,18 @@ public abstract class AbstractUnitOfWork<E extends Exception>
   * @throws E when the work overlaps with other concurrent writers.
   */
  public PostCommitFunction<Void, Void> commit() throws E {

    if (operations != null && operations.size() > 0) {
      if (parent == null) {
        BatchStatement batch = new BatchStatement();
        batch.addAll(operations.stream().map(o -> o.buildStatement(false)).collect(Collectors.toList()));
        batch.setConsistencyLevel(session.getDefaultConsistencyLevel());
        session.getSession().execute(batch);
      } else {
        parent.operations.addAll(operations);
      }
    }

    // All nested UnitOfWork should be committed (not aborted) before calls to
    // commit, check.
    boolean canCommit = true;

@@ -404,4 +426,6 @@ public abstract class AbstractUnitOfWork<E extends Exception>
  public boolean hasCommitted() {
    return committed;
  }

  public long committedAt() { return committedAt; }
}
@@ -79,6 +79,12 @@ public final class Filter<V> {
    return new Filter<V>(node, postulate);
  }

  public static <V> Filter<V> create(Getter<V> getter, HelenusPropertyNode node, Postulate<V> postulate) {
    Objects.requireNonNull(getter, "empty getter");
    Objects.requireNonNull(postulate, "empty operator");
    return new Filter<V>(node, postulate);
  }

  public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
    Objects.requireNonNull(getter, "empty getter");
    Objects.requireNonNull(op, "empty op");
@ -195,11 +195,9 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
|
||||
@Override
|
||||
public Object checkCache(String tableName, List<Facet> facets) {
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
|
||||
Object result = null;
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
result = sessionCache.get(cacheKey);
|
||||
for (String key : CacheUtil.flatKeys(tableName, facets)) {
|
||||
result = sessionCache.get(key);
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
@ -210,11 +208,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
@Override
|
||||
public void cacheEvict(List<Facet> facets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
sessionCache.invalidate(cacheKey);
|
||||
}
|
||||
CacheUtil.flatKeys(tableName, facets).forEach(key -> sessionCache.invalidate(key));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -279,8 +273,10 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
} else {
|
||||
binder.setValueForProperty(
|
||||
prop, valueMap.get(prop.getPropertyName()).toString());
|
||||
Object v = valueMap.get(prop.getPropertyName());
|
||||
if (v != null) {
|
||||
binder.setValueForProperty(prop, v.toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
if (binder.isBound()) {
|
||||
|
@ -305,11 +301,8 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
.collect(Collectors.toList());
|
||||
for (List<Facet> facets : deletedFacetSets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> combinations = CacheUtil.flattenFacets(facets);
|
||||
for (String[] combination : combinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
sessionCache.invalidate(cacheKey);
|
||||
}
|
||||
List<String> keys = CacheUtil.flatKeys(tableName, facets);
|
||||
keys.forEach(key -> sessionCache.invalidate(key));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -449,8 +442,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
|
||||
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
|
||||
return new SelectOperation<Tuple1<V1>>(
|
||||
this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
|
||||
return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
|
||||
}
|
||||
|
||||
public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {
|
||||
|
@ -459,8 +451,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
|
||||
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
|
||||
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
|
||||
return new SelectOperation<Fun.Tuple2<V1, V2>>(
|
||||
this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
|
||||
return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
|
||||
}
|
||||
|
||||
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
|
||||
|
@ -734,8 +725,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
|
||||
public <T> InsertOperation<T> upsert(T pojo) {
|
||||
Objects.requireNonNull(
|
||||
pojo,
|
||||
Objects.requireNonNull(pojo,
|
||||
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
|
||||
HelenusEntity entity = null;
|
||||
try {
|
||||
|
|
|
@@ -15,10 +15,12 @@
 */
package net.helenus.core;

import com.datastax.driver.core.Statement;
import com.google.common.base.Stopwatch;
import java.util.List;
import java.util.Optional;
import net.helenus.core.cache.Facet;
import net.helenus.core.operation.AbstractOperation;

public interface UnitOfWork<X extends Exception> extends AutoCloseable {

@@ -50,6 +52,10 @@ public interface UnitOfWork<X extends Exception> extends AutoCloseable {

  boolean hasCommitted();

  long committedAt();

  void batch(AbstractOperation operation);

  Optional<Object> cacheLookup(List<Facet> facets);

  void cacheUpdate(Object pojo, List<Facet> facets);
@@ -17,6 +17,7 @@ package net.helenus.core.cache;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty;

@@ -29,6 +30,8 @@ public class BoundFacet extends Facet<String> {
    this.properties.put(property, value);
  }

  public Set<HelenusProperty> getProperties() { return properties.keySet(); }

  public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
    super(
        name,
@@ -1,6 +1,7 @@
package net.helenus.core.cache;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

@@ -17,8 +18,7 @@ public class CacheUtil {
    return out;
  }

  private static void kCombinations(
      List<String> items, int n, int k, String[] arr, List<String[]> out) {
  private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
    if (k == 0) {
      out.add(arr.clone());
    } else {

@@ -29,6 +29,15 @@ public class CacheUtil {
    }
  }

  public static List<String> flatKeys(String table, List<Facet> facets) {
    return flattenFacets(facets)
        .stream()
        .map(combination -> {
          return table + "." + Arrays.toString(combination);
        })
        .collect(Collectors.toList());
  }

  public static List<String[]> flattenFacets(List<Facet> facets) {
    List<String[]> combinations =
        CacheUtil.combinations(

@@ -41,11 +50,31 @@ public class CacheUtil {
          return facet.name() + "==" + facet.value();
        })
        .collect(Collectors.toList()));
    // TODO(gburd): rework so as to not generate the combinations at all rather than filter
    for (Facet facet : facets) {
      if (facet.fixed()) continue;
      if (facet.alone() && facet.combined() && true) continue;
      combinations = combinations
          .stream()
          .filter(combo -> {
            for (String c : combo) {
              // When used alone, this facet is not distinct so don't use it as a key.
              if (facet.alone() == false && c.equals(facet.name())) {
                return false;
              }
              // Don't use this facet in combination with others to create keys.
              if (facet.combined() == false && c.split("==")[0].equals(facet.name())) {
                return false;
              }
            }
            return true;
          })
          .collect(Collectors.toList());
    }
    return combinations;
  }

  public static Object merge(Object to, Object from) {
    // TODO(gburd): do a proper merge given that materialized views are cached with their table name
    if (to == from) {
      return to;
    } else {

@@ -71,4 +100,12 @@ public class CacheUtil {
        .map(facet -> facet.value().toString())
        .collect(Collectors.joining("."));
  }

  public static String writeTimeKey(String propertyName) {
    return "_" + propertyName + "_writeTime";
  }

  public static String ttlKey(String propertyName) {
    return "_" + propertyName + "_ttl";
  }
}
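Aside, to illustrate the shape of the flat session-cache keys built by flatKeys() above (the widget table and its facets are hypothetical):

    // flatKeys("widget", facets) might yield, one entry per facet combination:
    //   "widget.[id==42]"
    //   "widget.[id==42, name==gizmo]"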
13
src/main/java/net/helenus/core/cache/Facet.java
vendored
@@ -21,6 +21,8 @@ public class Facet<T> {
  private final String name;
  private T value;
  private boolean fixed = false;
  private boolean alone = true;
  private boolean combined = true;

  public Facet(String name) {
    this.name = name;

@@ -47,4 +49,15 @@ public class Facet<T> {
  public boolean fixed() {
    return fixed;
  }

  public void setUniquelyIdentifyingWhenAlone(boolean alone) {
    this.alone = alone;
  }

  public void setUniquelyIdentifyingWhenCombined(boolean combined) {
    this.combined = combined;
  }

  public boolean alone() { return alone; }
  public boolean combined() { return combined; }
}
@ -25,16 +25,30 @@ import net.helenus.mapping.HelenusProperty;
|
|||
public class UnboundFacet extends Facet<String> {
|
||||
|
||||
private final List<HelenusProperty> properties;
|
||||
private final boolean alone;
|
||||
private final boolean combined;
|
||||
|
||||
public UnboundFacet(List<HelenusProperty> properties) {
|
||||
public UnboundFacet(List<HelenusProperty> properties, boolean alone, boolean combined) {
|
||||
super(SchemaUtil.createPrimaryKeyPhrase(properties));
|
||||
this.properties = properties;
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public UnboundFacet(HelenusProperty property) {
|
||||
public UnboundFacet(List<HelenusProperty> properties) {
|
||||
this(properties, true, true);
|
||||
}
|
||||
|
||||
public UnboundFacet(HelenusProperty property, boolean alone, boolean combined) {
|
||||
super(property.getPropertyName());
|
||||
properties = new ArrayList<HelenusProperty>();
|
||||
properties.add(property);
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public UnboundFacet(HelenusProperty property) {
|
||||
this(property, true, true);
|
||||
}
|
||||
|
||||
public List<HelenusProperty> getProperties() {
|
||||
|
@ -42,18 +56,22 @@ public class UnboundFacet extends Facet<String> {
|
|||
}
|
||||
|
||||
public Binder binder() {
|
||||
return new Binder(name(), properties);
|
||||
return new Binder(name(), properties, alone, combined);
|
||||
}
|
||||
|
||||
public static class Binder {
|
||||
|
||||
private final String name;
|
||||
private final boolean alone;
|
||||
private final boolean combined;
|
||||
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
|
||||
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
|
||||
|
||||
Binder(String name, List<HelenusProperty> properties) {
|
||||
Binder(String name, List<HelenusProperty> properties, boolean alone, boolean combined) {
|
||||
this.name = name;
|
||||
this.properties.addAll(properties);
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public Binder setValueForProperty(HelenusProperty prop, Object value) {
|
||||
|
@ -67,7 +85,10 @@ public class UnboundFacet extends Facet<String> {
|
|||
}
|
||||
|
||||
public BoundFacet bind() {
|
||||
return new BoundFacet(name, boundProperties);
|
||||
BoundFacet facet = new BoundFacet(name, boundProperties);
|
||||
facet.setUniquelyIdentifyingWhenAlone(alone);
|
||||
facet.setUniquelyIdentifyingWhenCombined(combined);
|
||||
return facet;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,8 @@ import com.datastax.driver.core.ResultSet;
|
|||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import com.datastax.driver.core.Statement;
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
|
||||
|
@ -39,15 +41,8 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
public E sync() throws TimeoutException {
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps,
|
||||
null,
|
||||
traceContext,
|
||||
queryExecutionTimeout,
|
||||
queryTimeoutUnits,
|
||||
showValues,
|
||||
false);
|
||||
ResultSet resultSet = this.execute(sessionOps,null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues,false);
|
||||
return transform(resultSet);
|
||||
} finally {
|
||||
context.stop();
|
||||
|
@ -59,11 +54,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
ResultSet resultSet =
|
||||
execute(
|
||||
sessionOps,
|
||||
uow,
|
||||
traceContext,
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext,
|
||||
queryExecutionTimeout,
|
||||
queryTimeoutUnits,
|
||||
showValues,
|
||||
|
|
|
@ -64,9 +64,9 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
try {
|
||||
Optional<E> result = Optional.empty();
|
||||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable() && checkCache;
|
||||
boolean updateCache = isSessionCacheable() && !ignoreCache();
|
||||
|
||||
if (checkCache && isSessionCacheable()) {
|
||||
if (updateCache) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
|
@ -119,7 +119,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
E cachedResult = null;
|
||||
final boolean updateCache;
|
||||
|
||||
if (checkCache) {
|
||||
if (!ignoreCache()) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
|
|
|
@ -44,11 +44,11 @@ import net.helenus.support.HelenusException;
|
|||
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
|
||||
extends Operation<E> {
|
||||
|
||||
protected boolean checkCache = true;
|
||||
protected boolean showValues = true;
|
||||
protected TraceContext traceContext;
|
||||
long queryExecutionTimeout = 10;
|
||||
TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
|
||||
private boolean ignoreCache = false;
|
||||
private ConsistencyLevel consistencyLevel;
|
||||
private ConsistencyLevel serialConsistencyLevel;
|
||||
private RetryPolicy retryPolicy;
|
||||
|
@ -66,12 +66,12 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
public abstract Statement buildStatement(boolean cached);
|
||||
|
||||
public O uncached(boolean enabled) {
|
||||
checkCache = enabled;
|
||||
ignoreCache = !enabled;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O uncached() {
|
||||
checkCache = false;
|
||||
ignoreCache = true;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
|
@ -313,6 +313,10 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
|
||||
protected boolean ignoreCache() {
|
||||
return ignoreCache;
|
||||
}
|
||||
|
||||
protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
|
||||
E result = null;
|
||||
Optional<Object> optionalCachedResult = Optional.empty();
|
||||
|
|
|
@ -67,7 +67,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable();
|
||||
|
||||
if (checkCache && isSessionCacheable()) {
|
||||
if (!ignoreCache() && isSessionCacheable()) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
|
@ -126,7 +126,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
E cachedResult = null;
|
||||
final boolean updateCache;
|
||||
|
||||
if (checkCache) {
|
||||
if (!ignoreCache()) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
|
|
|
@@ -37,6 +37,7 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
  public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);
    this.entity = entity;
    //TODO(gburd): cache SELECT COUNT results within the scope of a UOW
  }

  @Override
@@ -28,6 +28,7 @@ import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;

public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {

@@ -152,6 +153,16 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
    return result;
  }

  public ResultSet batch(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      throw new HelenusException("UnitOfWork cannot be null when batching operations.");
    }

    uow.cacheEvict(bindFacetValues());
    uow.batch(this);
    return null;
  }

  @Override
  public List<Facet> getFacets() {
    return entity.getFacets();
@ -22,15 +22,19 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
|
|||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Getter;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
|
@ -39,10 +43,12 @@ import net.helenus.support.Fun;
|
|||
import net.helenus.support.HelenusException;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
||||
import static net.helenus.mapping.ColumnType.CLUSTERING_COLUMN;
|
||||
import static net.helenus.mapping.ColumnType.PARTITION_KEY;
|
||||
|
||||
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
|
||||
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
|
||||
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final T pojo;
|
||||
private final Class<?> resultType;
|
||||
private HelenusEntity entity;
|
||||
|
@ -59,8 +65,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
this.resultType = ResultSet.class;
|
||||
}
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.ifNotExists = ifNotExists;
|
||||
|
@ -68,12 +73,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
this.resultType = resultType;
|
||||
}
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
T pojo,
|
||||
Set<String> mutations,
|
||||
boolean ifNotExists) {
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
|
||||
Set<String> mutations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.entity = entity;
|
||||
|
@ -248,11 +249,39 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
}
|
||||
}
|
||||
|
||||
protected void adjustTtlAndWriteTime(MapExportable pojo) {
|
||||
if (ttl != null || timestamp != null) {
|
||||
List<String> propertyNames = values.stream()
|
||||
.map(t -> t._1.getProperty())
|
||||
.filter(prop -> {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
return false;
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
})
|
||||
.map(prop -> prop.getColumnName().toCql(true))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (propertyNames.size() > 0) {
|
||||
if (ttl != null) {
|
||||
propertyNames.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
|
||||
}
|
||||
if (timestamp != null) {
|
||||
propertyNames.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), timestamp));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public T sync() throws TimeoutException {
|
||||
T result = super.sync();
|
||||
if (entity.isCacheable() && result != null) {
|
||||
sessionOps.updateCache(result, entity.getFacets());
|
||||
sessionOps.updateCache(result, bindFacetValues());
|
||||
adjustTtlAndWriteTime((MapExportable)result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -274,7 +303,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
}
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
cacheUpdate(uow, result, entity.getFacets());
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
adjustTtlAndWriteTime((MapExportable)pojo);
|
||||
} else {
|
||||
if (entity.isCacheable()) {
|
||||
sessionOps.cacheEvict(bindFacetValues());
|
||||
|
@@ -283,6 +313,24 @@ public final class InsertOperation<T> extends AbstractOperati
    return result;
  }

  public T batch(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      throw new HelenusException("UnitOfWork cannot be null when batching operations.");
    }

    if (this.entity != null && pojo != null) {
      Class<?> iface = this.entity.getMappingInterface();
      if (resultType == iface) {
        cacheUpdate(uow, pojo, bindFacetValues());
        adjustTtlAndWriteTime((MapExportable)pojo);
        uow.batch(this);
        return (T) pojo;
      }
    }

    return sync(uow);
  }

  @Override
  public List<Facet> bindFacetValues() {
    List<Facet> facets = getFacets();
@@ -94,8 +94,7 @@ public abstract class Operation<E> {
      boolean cached)
      throws TimeoutException {

    // Start recording in a Zipkin sub-span our execution time to perform this
    // operation.
    // Start recording in a Zipkin sub-span our execution time to perform this operation.
    Tracer tracer = session.getZipkinTracer();
    Span span = null;
    if (tracer != null && traceContext != null) {
@@ -63,4 +63,7 @@ public final class SelectFirstOperation<E>
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }

  @Override
  public boolean ignoreCache() { return delegate.ignoreCache(); }
}
@@ -56,4 +56,7 @@ public final class SelectFirstTransformingOperation<R, E>
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }

  @Override
  public boolean ignoreCache() { return delegate.ignoreCache(); }
}
@ -29,8 +29,10 @@ import java.util.function.Function;
|
|||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.Entity;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
@ -52,8 +54,10 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
protected List<Ordering> ordering = null;
|
||||
protected Integer limit = null;
|
||||
protected boolean allowFiltering = false;
|
||||
|
||||
protected String alternateTableName = null;
|
||||
protected boolean isCacheable = false;
|
||||
protected boolean implmentsEntityType = false;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
||||
|
@ -89,7 +93,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
isCacheable = entity.isCacheable();
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implmentsEntityType = entity.getMappingInterface().getClass().isAssignableFrom(Entity.class);
|
||||
}
|
||||
|
||||
public SelectOperation(
|
||||
|
@ -106,18 +111,21 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
isCacheable = entity.isCacheable();
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implmentsEntityType = entity.getMappingInterface().getClass().isAssignableFrom(Entity.class);
|
||||
}
|
||||
|
||||
public SelectOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
Function<Row, E> rowMapper,
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
|
||||
HelenusPropertyNode... props) {
|
||||
|
||||
super(sessionOperations);
|
||||
|
||||
this.rowMapper = rowMapper;
|
||||
Collections.addAll(this.props, props);
|
||||
|
||||
HelenusEntity entity = props[0].getEntity();
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implmentsEntityType = entity.getMappingInterface().getClass().isAssignableFrom(Entity.class);
|
||||
}
|
||||
|
||||
public CountOperation count() {
|
||||
|
@ -264,9 +272,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
+ prop.getEntity().getMappingInterface());
|
||||
}
|
||||
|
||||
// TODO(gburd): writeTime and ttl will be useful on merge() but cause object
|
||||
// identity to fail.
|
||||
if (false && cached) {
|
||||
if (cached && implmentsEntityType) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
|
@ -275,10 +281,10 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
if (entity.equals(prop.getEntity())) {
|
||||
if (prop.getNext().isPresent()) {
|
||||
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
|
||||
}
|
||||
if (!prop.getProperty().getDataType().isCollectionType()) {
|
||||
selection.writeTime(columnName).as(columnName + "_writeTime");
|
||||
selection.ttl(columnName).as(columnName + "_ttl");
|
||||
selection.writeTime(columnName).as(CacheUtil.writeTimeKey(columnName));
|
||||
selection.ttl(columnName).as(CacheUtil.ttlKey(columnName));
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
@ -308,8 +314,13 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
boolean isFirstIndex = true;
|
||||
for (Filter<?> filter : filters.values()) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
HelenusProperty prop = filter.getNode().getProperty();
|
||||
if (allowFiltering == false) {
|
||||
HelenusProperty filterProp = filter.getNode().getProperty();
|
||||
HelenusProperty prop = props.stream()
|
||||
.map(HelenusPropertyNode::getProperty)
|
||||
.filter(thisProp -> thisProp.getPropertyName().equals(filterProp.getPropertyName()))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
if (allowFiltering == false && prop != null) {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
break;
|
||||
|
|
|
@@ -56,4 +56,10 @@ public final class SelectTransformingOperation<R, E>
  public Stream<R> transform(ResultSet resultSet) {
    return delegate.transform(resultSet).map(fn);
  }

  @Override
  public boolean isSessionCacheable() { return delegate.isSessionCacheable(); }

  @Override
  public boolean ignoreCache() { return delegate.ignoreCache(); }
}
@ -26,6 +26,7 @@ import java.util.function.Function;
|
|||
import java.util.stream.Collectors;
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.BoundFacet;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
|
@ -38,6 +39,9 @@ import net.helenus.support.HelenusException;
|
|||
import net.helenus.support.HelenusMappingException;
|
||||
import net.helenus.support.Immutables;
|
||||
|
||||
import static net.helenus.mapping.ColumnType.CLUSTERING_COLUMN;
|
||||
import static net.helenus.mapping.ColumnType.PARTITION_KEY;
|
||||
|
||||
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
|
||||
|
||||
private final Map<Assignment, BoundFacet> assignments = new HashMap<>();
|
||||
|
@ -65,8 +69,13 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
this.pojo = pojo;
|
||||
|
||||
if (pojo != null) {
|
||||
this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
|
||||
if (this.entity != null && entity.isCacheable() && pojo instanceof MapExportable) {
|
||||
this.pojo = pojo;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public UpdateOperation(
|
||||
|
@ -97,15 +106,10 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
}
|
||||
}
|
||||
|
||||
if (entity != null) {
|
||||
if (entity.isCacheable() && pojo != null && pojo instanceof MapExportable) {
|
||||
if (pojo != null) {
|
||||
if (!BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop).equals(v)) {
|
||||
String key = prop.getPropertyName();
|
||||
Map<String, Object> map = ((MapExportable) pojo).toMap();
|
||||
if (!(map instanceof ValueProviderMap)) {
|
||||
if (map.get(key) != v) {
|
||||
map.put(key, v);
|
||||
}
|
||||
}
|
||||
((MapExportable)pojo).put(key, v);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,13 +137,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value + delta);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet);
|
||||
|
@ -160,13 +165,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value - delta);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet);
|
||||
|
@ -191,18 +197,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet);
|
||||
|
@ -220,18 +229,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet);
|
||||
|
@ -249,16 +261,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null || draft != null) {
|
||||
List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null || draft != null) {
|
||||
final List<V> list;
|
||||
if (pojo != null) {
|
||||
list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
} else {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
}
|
||||
if (idx < 0) {
|
||||
|
@ -270,6 +280,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
}
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet);
|
||||
|
@ -287,18 +299,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -315,18 +330,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.addAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -343,18 +361,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.remove(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -371,18 +392,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list =
|
||||
new ArrayList<V>(
|
||||
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.removeAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -396,8 +420,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
Object valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
|
||||
valueObj = convertedList.get(0);
|
||||
|
@ -412,8 +435,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
List valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (List) converter.get().apply(value);
|
||||
}
|
||||
|
@ -437,17 +459,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set =
|
||||
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.add(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.add(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -464,17 +490,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set =
|
||||
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.addAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.addAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -491,17 +521,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set =
|
||||
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.remove(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.remove(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -518,17 +552,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set =
|
||||
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.removeAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.removeAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
|
@ -542,8 +580,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusProperty prop = p.getProperty();
|
||||
Object valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
|
||||
valueObj = convertedSet.iterator().next();
|
||||
|
@ -557,8 +594,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusProperty prop = p.getProperty();
|
||||
Set valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (Set) converter.get().apply(value);
|
||||
}
|
||||
|
@@ -582,22 +618,24 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();

BoundFacet facet = null;
final Map<K, V> map;
final BoundFacet facet;
if (pojo != null) {
Map<K, V> map =
new HashMap<K, V>(
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
map = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
map.put(key, value);
facet = new BoundFacet(prop, map);
} else if (draft != null) {
((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
map = (Map<K, V>) draftMap.get(prop.getPropertyName());
map.put(key, value);
facet = new BoundFacet(prop, map);
} else {
map = null;
facet = null;
}

Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map<Object, Object> convertedMap =
(Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
assignments.put(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()), facet);
}

@@ -618,19 +656,22 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();

BoundFacet facet = null;
final Map<K, V> newMap;
final BoundFacet facet;
if (pojo != null) {
Map<K, V> newMap =
new HashMap<K, V>(
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
newMap = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
newMap.putAll(map);
facet = new BoundFacet(prop, newMap);
} else if (draft != null) {
((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
newMap = (Map<K, V>) draftMap.get(prop.getPropertyName());
newMap.putAll(map);
facet = new BoundFacet(prop, newMap);
} else {
newMap = null;
facet = null;
}

Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map convertedMap = (Map) converter.get().apply(map);
assignments.put(QueryBuilder.putAll(p.getColumnName(), convertedMap), facet);

@@ -718,14 +759,36 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
}
}

private void adjustTtlAndWriteTime(MapExportable pojo) {
if (ttl != null || timestamp != null) {
List<String> names = new ArrayList<String>(assignments.size());
for (BoundFacet facet : assignments.values()) {
for (HelenusProperty prop : facet.getProperties()) {
names.add(prop.getColumnName().toCql(true));
}
}

if (names.size() > 0) {
if (ttl != null) {
names.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
}
if (timestamp != null) {
names.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), timestamp));
}
}
}
}

@Override
public E sync() throws TimeoutException {
E result = super.sync();
if (entity.isCacheable()) {
if (draft != null) {
sessionOps.updateCache(draft, bindFacetValues());
adjustTtlAndWriteTime(draft);
} else if (pojo != null) {
sessionOps.updateCache(pojo, bindFacetValues());
adjustTtlAndWriteTime((MapExportable)pojo);
} else {
sessionOps.cacheEvict(bindFacetValues());
}

@@ -741,13 +804,40 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
E result = super.sync(uow);
if (draft != null) {
cacheUpdate(uow, result, bindFacetValues());
adjustTtlAndWriteTime(draft);
} else if (pojo != null) {
cacheUpdate(uow, (E) pojo, bindFacetValues());
adjustTtlAndWriteTime((MapExportable)pojo);
return (E) pojo;
}
return result;
}

public E batch(UnitOfWork uow) throws TimeoutException {
if (uow == null) {
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
}

final E result;
if (draft != null) {
result = draft.build();
adjustTtlAndWriteTime(draft);
} else if (pojo != null) {
result = (E) pojo;
adjustTtlAndWriteTime((MapExportable)pojo);
} else {
result = null;
}

if (result != null) {
cacheUpdate(uow, result, bindFacetValues());
uow.batch(this);
return result;
}

return sync(uow);
}

@Override
public List<Facet> bindFacetValues() {
List<Facet> facets = bindFacetValues(entity.getFacets());
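For orientation, a minimal usage sketch of the new batch(UnitOfWork) entry point (names such as session, widget and key mirror the UnitOfWorkTest fixtures further down; this is an illustration, not part of the commit): the operation defers execution by registering itself on the unit of work via uow.batch(this), and the queued statements are applied together when the unit of work commits.

    try (UnitOfWork uow = session.begin()) {
      Widget w = session.<Widget>upsert(widget)
          .value(widget::id, key)
          .value(widget::name, "queued-not-yet-written")
          .batch(uow);   // returns the built entity immediately; the mutation is queued on uow
      uow.commit();      // queued operations are executed as a batch at commit time
    }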
29 src/main/java/net/helenus/core/reflect/Entity.java Normal file

@@ -0,0 +1,29 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.reflect;

import net.helenus.core.Getter;

public interface Entity {
String WRITTEN_AT_METHOD = "writtenAt";
String TTL_OF_METHOD = "ttlOf";

default Long writtenAt(Getter getter) { return 0L; }
default Long writtenAt(String prop) { return 0L; };

default Integer ttlOf(Getter getter) { return 0; };
default Integer ttlOf(String prop) {return 0; };
}
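A short sketch of how the new Entity defaults are meant to be consumed once an entity interface extends Entity (as Widget does further down): the proxy overrides these defaults and reads the metadata that adjustTtlAndWriteTime(...) stored in the backing map, falling back to 0 when nothing was recorded. Hypothetical usage, assuming a previously fetched Widget w:

    Long ts = w.writtenAt(widget::name);  // write-time recorded for the column, or 0L if unknown
    Integer ttl = w.ttlOf(widget::name);  // remaining TTL in seconds, or 0 if none was set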
@@ -18,8 +18,10 @@ package net.helenus.core.reflect;
import java.util.Map;

public interface MapExportable {

public static final String TO_MAP_METHOD = "toMap";
String TO_MAP_METHOD = "toMap";
String PUT_METHOD = "put";

Map<String, Object> toMap();
default Map<String, Object> toMap(boolean mutable) { return null; }
default void put(String key, Object value) { }
}
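The intended contract, roughly (a sketch under the assumption that the entity proxy implements these methods, as the MapperInvocationHandler changes below suggest): toMap() keeps returning a read-only view, toMap(true) hands back the live backing map, and put(...) writes a value, or TTL/write-time bookkeeping, straight into that map.

    MapExportable exportable = (MapExportable) w;      // w: any proxied entity instance
    Map<String, Object> view = exportable.toMap();     // unmodifiable snapshot
    Map<String, Object> live = exportable.toMap(true); // mutable backing map
    exportable.put("name", "Bill");                    // updates the cached column value in place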
@@ -15,6 +15,14 @@
*/
package net.helenus.core.reflect;

import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.cache.CacheUtil;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException;

import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;

@@ -24,18 +32,15 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException;

public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
private static final long serialVersionUID = -7044209982830584984L;

private final Map<String, Object> src;
private Map<String, Object> src;
private final Class<E> iface;

public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {

@@ -95,15 +100,56 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return true;
}
}
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
return true;
}
if (src instanceof MapExportable && otherObj.equals(((MapExportable) src).toMap())) {
return true;
if (otherObj instanceof MapExportable) {
return MappingUtil.compareMaps((MapExportable)otherObj, src);
}
return false;
}

if (MapExportable.PUT_METHOD.equals(methodName) && method.getParameterCount() == 2) {
final String key = (String)args[0];
final Object value = (Object)args[1];
if (src instanceof ValueProviderMap) {
this.src = fromValueProviderMap(src);
}
src.put(key, value);
return null;
}

if (Entity.WRITTEN_AT_METHOD.equals(methodName) && method.getParameterCount() == 1) {
final String key;
if (args[0] instanceof String) {
key = CacheUtil.writeTimeKey((String)args[0]);
} else if (args[0] instanceof Getter) {
Getter getter = (Getter)args[0];
key = CacheUtil.writeTimeKey(MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName());
} else {
return 0L;
}
long[] v = (long[])src.get(key);
if (v != null) {
return v[0];
}
return 0L;
}

if (Entity.TTL_OF_METHOD.equals(methodName) && method.getParameterCount() == 1) {
final String key;
if (args[0] instanceof String) {
key = CacheUtil.ttlKey((String)args[0]);
} else if (args[0] instanceof Getter) {
Getter getter = (Getter)args[0];
key = CacheUtil.ttlKey(MappingUtil.resolveMappingProperty(getter).getProperty().getColumnName().toCql(true));
} else {
return 0;
}
int v[] = (int[])src.get(key);
if (v != null) {
return v[0];
}
return 0;
}

if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
throw new HelenusException("invalid getter method " + method);
}

@@ -129,25 +175,26 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
}

if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
return src; // Collections.unmodifiableMap(src);
if (method.getParameterCount() == 1 && args[0] instanceof Boolean) {
if ((boolean)args[0] == true) { return src; }
}
return Collections.unmodifiableMap(src);
}

Object value = src.get(methodName);

Class<?> returnType = method.getReturnType();
final Object value = src.get(methodName);

if (value == null) {

Class<?> returnType = method.getReturnType();

// Default implementations of non-Transient methods in entities are the default
// value when the
// map contains 'null'.
// value when the map contains 'null'.
if (method.isDefault()) {
return invokeDefault(proxy, method, args);
}

// Otherwise, if the return type of the method is a primitive Java type then
// we'll return the standard
// default values to avoid a NPE in user code.
// we'll return the standard default values to avoid a NPE in user code.
if (returnType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
if (type == null) {

@@ -160,6 +207,15 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return value;
}

static Map<String, Object> fromValueProviderMap(Map v) {
Map<String, Object> m = new HashMap<String, Object>(v.size());
Set<String> keys = v.keySet();
for (String key : keys) {
m.put(key, v.get(key));
}
return m;
}

static class SerializationProxy<E> implements Serializable {

private static final long serialVersionUID = -5617583940055969353L;

@@ -170,11 +226,7 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
public SerializationProxy(MapperInvocationHandler mapper) {
this.iface = mapper.iface;
if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
Set<String> keys = mapper.src.keySet();
for (String key : keys) {
this.src.put(key, mapper.src.get(key));
}
this.src = fromValueProviderMap(mapper.src);
} else {
this.src = mapper.src;
}
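The put(...) interception above is effectively copy-on-write: a ValueProviderMap wraps the driver row and is not meant to be mutated, so the handler copies it into a plain HashMap via fromValueProviderMap(...) the first time a value is written, and later getter calls read from that copy. A hedged illustration (w is assumed to have been materialized from a SELECT, so its backing map starts out as a ValueProviderMap):

    ((MapExportable) w).put("name", "Bill"); // first write: backing map is copied, then mutated
    assert "Bill".equals(w.name());          // subsequent reads see the updated HashMap copy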
@@ -132,8 +132,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
primaryKeyProperties = null;
}
for (ConstraintValidator<?, ?> constraint :
MappingUtil.getValidators(prop.getGetterMethod())) {
for (ConstraintValidator<?, ?> constraint : MappingUtil.getValidators(prop.getGetterMethod())) {
if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) {
DistinctValidator validator = (DistinctValidator) constraint;
String[] values = validator.constraintAnnotation.value();

@@ -149,7 +148,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
}
}
}
facet = new UnboundFacet(props);
facet = new UnboundFacet(props, validator.alone(), validator.combined());
} else {
facet = new UnboundFacet(prop);
}
@@ -20,7 +20,9 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.validation.Constraint;
import javax.validation.ConstraintValidator;
import net.helenus.core.Getter;

@@ -126,6 +128,12 @@ public final class MappingUtil {
return getter.getName();
}

public static HelenusProperty getPropertyForColumn(HelenusEntity entity, String name) {
if (name == null)
return null;
return entity.getOrderedProperties().stream().filter(p -> p.getColumnName().equals(name)).findFirst().orElse(null);
}

public static String getDefaultColumnName(Method getter) {
return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
}

@@ -320,4 +328,31 @@ public final class MappingUtil {
e.initCause(cause);
throw e;
}

public static boolean compareMaps(MapExportable me, Map<String, Object> m2) {
Map<String, Object> m1 = me.toMap();
List<String> matching = m2.entrySet()
.stream()
.filter(e -> !e.getKey().matches("^_.*_(ttl|writeTime)$"))
.filter(e -> {
String k = e.getKey();
if (m1.containsKey(k)) {
Object o1 = e.getValue();
Object o2 = m1.get(k);
if (o1 == o2 || o1.equals(o2))
return true;
}
return false;
})
.map(e -> e.getKey())
.collect(Collectors.toList());
List<String> divergent = m1.entrySet()
.stream()
.filter(e -> !e.getKey().matches("^_.*_(ttl|writeTime)$"))
.filter(e -> !matching.contains(e.getKey()))
.map(e -> e.getKey())
.collect(Collectors.toList());
return divergent.size() > 0 ? false : true;
}

}
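compareMaps(...) gives entity equality that ignores the synthetic TTL/write-time bookkeeping entries (keys matching ^_.*_(ttl|writeTime)$), so two views of the same row compare equal even if only one of them carries that metadata. A rough sketch, assuming w1 and w2 are proxies over the same logical row:

    Map<String, Object> other = ((MapExportable) w1).toMap();          // may include _name_ttl / _name_writeTime entries
    boolean same = MappingUtil.compareMaps((MapExportable) w2, other); // true when all column values match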
@@ -234,6 +234,11 @@ public final class Constraints {
* @return Java
*/
String[] value() default "";

boolean alone() default true;

boolean combined() default true;

}

/**
@@ -23,9 +23,12 @@ public final class DistinctValidator
extends AbstractConstraintValidator<Constraints.Distinct, CharSequence>
implements ConstraintValidator<Constraints.Distinct, CharSequence> {

private Constraints.Distinct annotation;

@Override
public void initialize(Constraints.Distinct constraintAnnotation) {
super.initialize(constraintAnnotation);
this.annotation = constraintAnnotation;
}

@Override

@@ -33,4 +36,17 @@ public final class DistinctValidator
// TODO(gburd): check that the list contains valid property names.
return true;
}

public String[] value() {
return annotation == null ? null : annotation.value();
}

public boolean alone() {
return annotation == null ? true : annotation.alone();
}

public boolean combined() {
return annotation == null ? true : annotation.combined();
}

}
@@ -25,13 +25,13 @@ public enum BeanColumnValueProvider implements ColumnValueProvider {
INSTANCE;

@Override
public <V> V getColumnValue(
Object bean, int columnIndexUnused, HelenusProperty property, boolean immutable) {
public <V> V getColumnValue(Object bean, int columnIndexUnused, HelenusProperty property, boolean immutable) {

Method getter = property.getGetterMethod();

Object value = null;
try {
getter.setAccessible(true);
value = getter.invoke(bean, new Object[] {});
} catch (InvocationTargetException e) {
if (e.getCause() != null) {
@@ -40,8 +40,7 @@ public final class RowColumnValueProvider implements ColumnValueProvider {
}

@Override
public <V> V getColumnValue(
Object sourceObj, int columnIndex, HelenusProperty property, boolean immutable) {
public <V> V getColumnValue(Object sourceObj, int columnIndex, HelenusProperty property, boolean immutable) {

Row source = (Row) sourceObj;
@@ -16,6 +16,7 @@
package net.helenus.mapping.value;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@@ -45,8 +46,7 @@ public final class ValueProviderMap implements Map<String, Object> {
methodName));
}

@Override
public Object get(Object key) {
public Object get(Object key, boolean immutable) {
if (key instanceof String) {
String name = (String) key;
HelenusProperty prop = entity.getProperty(name);

@@ -57,6 +57,11 @@ public final class ValueProviderMap implements Map<String, Object> {
return null;
}

@Override
public Object get(Object key) {
return get(key, this.immutable);
}

@Override
public Set<String> keySet() {
return entity

@@ -78,7 +83,7 @@ public final class ValueProviderMap implements Map<String, Object> {

@Override
public boolean containsKey(Object key) {
if (key instanceof Object) {
if (key instanceof String) {
String s = (String) key;
return keySet().contains(s);
}

@@ -149,7 +154,7 @@ public final class ValueProviderMap implements Map<String, Object> {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || (!o.getClass().isAssignableFrom(Map.class) && getClass() != o.getClass()))
if (o == null || !(o.getClass().isAssignableFrom(Map.class) || o.getClass().getSimpleName().equals("UnmodifiableMap")))
return false;

Map that = (Map) o;
@@ -1,5 +1,6 @@
package net.helenus.test.integration.core.draft;

import java.util.Map;
import java.util.UUID;
import net.helenus.core.AbstractAuditedEntityDraft;
import net.helenus.core.Helenus;

@@ -89,5 +90,6 @@ public interface Inventory {
mutate(inventory::NORAM, count);
return this;
}

}
}
@@ -25,6 +25,7 @@ import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.core.annotation.Cacheable;
import net.helenus.core.reflect.Entity;
import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.PartitionKey;

@@ -36,7 +37,7 @@ import org.junit.Test;

@Table
@Cacheable
interface Widget {
interface Widget extends Entity {
@PartitionKey
UUID id();

@@ -48,6 +49,12 @@ interface Widget {
String a();

String b();

@Constraints.Distinct(alone=false)
String c();

@Constraints.Distinct(combined=false)
String d();
}

public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {

@@ -81,6 +88,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync();

try (UnitOfWork uow = session.begin()) {

@@ -127,6 +136,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow1);

try (UnitOfWork uow2 = session.begin(uow1)) {

@@ -149,6 +160,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow2);

uow2.commit()

@@ -189,6 +202,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow);

// This should read from the database and return a Widget.

@@ -225,6 +240,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync();

try (UnitOfWork uow = session.begin()) {

@@ -235,13 +252,16 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
Assert.assertEquals(w1, w2);

// This should remove the object from the session cache.
w3 =
session.<Widget>update(w2).set(widget::name, "Bill").where(widget::id, eq(key)).sync(uow);
w3 = session
.<Widget>update(w2)
.set(widget::name, "Bill")
.where(widget::id, eq(key))
.sync(uow);

// Fetch from session cache, should have old name.
// Fetch from session cache will cache miss (as it was updated) and trigger a SELECT.
w4 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync().orElse(null);
Assert.assertEquals(w4, w2);
Assert.assertEquals(w4.name(), w1.name());
Assert.assertEquals(w4.name(), w3.name());

// This should skip the cache.
w5 =

@@ -253,15 +273,14 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.sync()
.orElse(null);

Assert.assertNotEquals(w5, w2); // Not the same instance,
Assert.assertTrue(w2.equals(w5)); // but they have the same values,
Assert.assertFalse(w5.equals(w2)); // regardless of the order when comparing.
Assert.assertTrue(w5.equals(w2));
Assert.assertTrue(w2.equals(w5));
Assert.assertEquals(w5.name(), "Bill");

uow.commit()
.andThen(
() -> {
Assert.assertEquals(w1, w2);
Assert.assertEquals(w2, w3);
});
}

@@ -289,82 +308,166 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync();

try (UnitOfWork uow = session.begin()) {

// This should read from the database and return a Widget.
w2 =
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
w2 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);

// This should remove the object from the cache.
session.delete(widget).where(widget::id, eq(key)).sync(uow);
session.delete(widget).where(widget::id, eq(key))
.sync(uow);

// This should fail to read from the cache.
w3 =
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
w3 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);

Assert.assertEquals(w3, null);
Assert.assertEquals(null, w3);

uow.commit()
.andThen(
() -> {
Assert.assertEquals(w1, w2);
Assert.assertEquals(w3, null);
Assert.assertEquals(null, w3);
});
}

w4 =
session
.<Widget>select(widget)
.where(widget::name, eq(w1.name()))
.where(widget::id, eq(key))
.single()
.sync()
.orElse(null);

Assert.assertEquals(w4, null);
Assert.assertEquals(null, w4);
}
/*

@Test
public void testBatchingUpdatesAndInserts() throws Exception {
Widget w1, w2, w3, w4, w5;
Long committedAt = 0L;
UUID key = UUIDs.timeBased();

try (UnitOfWork uow = session.begin()) {
w1 = session.<Widget>upsert(widget)
.value(widget::id, key)
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.batch(uow);
Assert.assertTrue(0L == w1.writtenAt(widget::name));
Assert.assertTrue(0 == w1.ttlOf(widget::name));
w2 = session.<Widget>update(w1)
.set(widget::name, RandomString.make(10))
.where(widget::id, eq(key))
.usingTtl(30)
.batch(uow);
Assert.assertEquals(w1, w2);
Assert.assertTrue(0L == w2.writtenAt(widget::name));
Assert.assertTrue(30 == w1.ttlOf(widget::name));
w3 = session.<Widget>select(Widget.class)
.where(widget::id, eq(key))
.single()
.sync(uow)
.orElse(null);
Assert.assertEquals(w2, w3);
Assert.assertTrue(0L == w3.writtenAt(widget::name));
Assert.assertTrue(30 <= w3.ttlOf(widget::name));
uow.commit();
committedAt = uow.committedAt();
}
w4 = session.<Widget>select(Widget.class)
.where(widget::id, eq(key))
.single()
.sync()
.orElse(null);
Assert.assertEquals(w3, w4);
Assert.assertTrue(w4.writtenAt(widget::name) == committedAt);
int ttl4 = w4.ttlOf(widget::name);
Assert.assertTrue(ttl4 <= 30);
w5 = session.<Widget>select(Widget.class)
.where(widget::id, eq(key))
.uncached()
.single()
.sync()
.orElse(null);
Assert.assertTrue(w4.equals(w5));
Assert.assertTrue(w5.writtenAt(widget::name) == committedAt);
int ttl5 = w5.ttlOf(widget::name);
Assert.assertTrue(ttl5 <= 30);
}

@Test
public void testInsertNoOp() throws Exception {
Widget w1, w2;
UUID key = UUIDs.timeBased();

UUID key1 = UUIDs.timeBased();

try (UnitOfWork uow = session.begin()) {
// This should inserted Widget, but not cache it.
w1 = session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync(uow);
w2 = session.<Widget>insert(w1).value(widget::id, key).sync(uow);
w1 = session.<Widget>insert(widget)
.value(widget::id, key1)
.value(widget::name, RandomString.make(20))
.sync(uow);
w2 = session.<Widget>upsert(w1)
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow);
uow.commit();
}
Assert.assertEquals(w1, w2);
//TODO(gburd): Assert.assertEquals(w1, w2);
}
*/
/*
* @Test public void testSelectAfterInsertProperlyCachesEntity() throws
* Exception { Widget w1, w2, w3, w4; UUID key = UUIDs.timeBased();
*
* try (UnitOfWork uow = session.begin()) {
*
* // This should cache the inserted Widget. w1 = session.<Widget>insert(widget)
* .value(widget::id, key) .value(widget::name, RandomString.make(20))
* .sync(uow);
*
* // This should read from the cache and get the same instance of a Widget. w2
* = session.<Widget>select(widget) .where(widget::id, eq(key)) .single()
* .sync(uow) .orElse(null);
*
* uow.commit() .andThen(() -> { Assert.assertEquals(w1, w2); }); }
*
* // This should read the widget from the session cache and maintain object
* identity. w3 = session.<Widget>select(widget) .where(widget::id, eq(key))
* .single() .sync() .orElse(null);
*
* Assert.assertEquals(w1, w3);
*
* // This should read the widget from the database, no object identity but
* values should match. w4 = session.<Widget>select(widget) .where(widget::id,
* eq(key)) .uncached() .single() .sync() .orElse(null);
*
* Assert.assertNotEquals(w1, w4); Assert.assertTrue(w1.equals(w4)); }
*/

@Test public void testSelectAfterInsertProperlyCachesEntity() throws
Exception { Widget w1, w2, w3, w4; UUID key = UUIDs.timeBased();

try (UnitOfWork uow = session.begin()) {
// This should cache the inserted Widget.
w1 = session.<Widget>insert(widget)
.value(widget::id, key)
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow);

// This should read from the cache and get the same instance of a Widget.
w2 = session.<Widget>select(widget)
.where(widget::id, eq(key))
.single()
.sync(uow)
.orElse(null);

uow.commit() .andThen(() -> { Assert.assertEquals(w1, w2); }); }

// This should read the widget from the session cache and maintain object identity.
w3 = session.<Widget>select(widget)
.where(widget::id, eq(key))
.single()
.sync()
.orElse(null);

Assert.assertEquals(w1, w3);

// This should read the widget from the database, no object identity but
// values should match.
w4 = session.<Widget>select(widget)
.where(widget::id,eq(key))
.uncached()
.single()
.sync()
.orElse(null);

Assert.assertFalse(w1 == w4);
Assert.assertTrue(w1.equals(w4));
Assert.assertTrue(w4.equals(w1));
}

}
@@ -55,5 +55,6 @@ public interface Account {
public Map<String, Object> toMap() {
return null;
}

}
}