WIP: More work toward UOW caching, but mainly a lot of work to clean up the API.
This commit is contained in:
parent 09b06f4ca4
commit 3cd889ea61
28 changed files with 471 additions and 322 deletions
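The API cleanup moves the fluent operations toward DSL-instance arguments and explicitly typed results (for example, select(dsl) alongside select(Class), and insert/update/upsert returning typed operations). A minimal usage sketch of the reshaped select API, based on the test changes in this diff (it assumes Customer is a registered entity and customer is its dsl instance obtained via session.dsl(Customer.class)):

    Customer customer = session.dsl(Customer.class);
    // single() + sync() now yields an Optional instead of a Stream
    Customer actual = session.<Customer>select(customer)
        .where(customer::id, eq(id))
        .single()
        .sync()
        .orElse(null);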
@@ -22,8 +22,8 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.PrintStream;
import java.util.concurrent.Executor;
import net.helenus.core.operation.AbstractCache;
import net.helenus.core.operation.CacheManager;

import net.helenus.core.operation.SessionCache;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;

@@ -113,10 +113,6 @@ public abstract class AbstractSessionOperations {
return null;
}

public AbstractCache cacheFor(CacheManager.Type type) {
return null;
}

RuntimeException translateException(RuntimeException e) {
if (e instanceof HelenusException) {
return e;

@@ -127,4 +123,7 @@ public abstract class AbstractSessionOperations {
void printCql(String cql) {
getPrintStream().println(cql);
}

public SessionCache getSessionCache() { return null; }

}
@@ -18,22 +18,10 @@ package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.diffplug.common.base.Errors;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.io.Closeable;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import net.helenus.core.operation.*;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.DslExportable;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.*;
import net.helenus.support.DslPropertyException;

@@ -41,11 +29,13 @@ import net.helenus.support.Fun;
import net.helenus.support.Fun.Tuple1;
import net.helenus.support.Fun.Tuple2;
import net.helenus.support.Fun.Tuple6;
import net.helenus.support.HelenusMappingException;

import java.io.Closeable;
import java.io.PrintStream;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.function.Function;

@@ -72,7 +62,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer;
private final Metadata metadata;
private final CacheManager cacheManager;
private final SessionCache sessionCache;

HelenusSession(
Session session,

@@ -103,7 +93,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
this.metadata = session.getCluster().getMetadata();
this.cacheManager = new CacheManager(this);
this.sessionCache = new SessionCache();
}

@Override

@@ -190,13 +180,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return child.begin();
}

public <E> SelectOperation<E> select(Class<E> entityClass) {
public <E> SelectOperation<E> select(E pojo) {
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
ColumnValueProvider valueProvider = getValueProvider();
HelenusEntity entity = Helenus.resolve(pojo);
Class<?> entityClass = entity.getMappingInterface();

return new SelectOperation<E>(
this,
entity,
(r) -> {
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
return (E) Helenus.map(entityClass, map);
});
}

public <E> SelectOperation<E> select(Class<E> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is empty");
ColumnValueProvider valueProvider = getValueProvider();
HelenusEntity entity = Helenus.entity(entityClass);

//TODO cache entity
return new SelectOperation<E>(
this,
entity,

@@ -215,6 +218,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return new SelectOperation<Row>(this, Helenus.entity(entityClass));
}

public <E> SelectOperation<Row> selectAll(E pojo) {
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
HelenusEntity entity = Helenus.resolve(pojo);
return new SelectOperation<Row>(this, entity);
}

public <E> SelectOperation<E> selectAll(Class<E> entityClass, Function<Row, E> rowMapper) {
Objects.requireNonNull(entityClass, "entityClass is empty");
Objects.requireNonNull(rowMapper, "rowMapper is empty");

@@ -376,12 +385,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return new CountOperation(this, Helenus.resolve(dsl));
}

public <V> UpdateOperation update() {
return new UpdateOperation(this);
public UpdateOperation<ResultSet> update() {
return new UpdateOperation<ResultSet>(this);
}

public <T> UpdateOperation update(Drafted<T> draft) {
UpdateOperation update = new UpdateOperation(this);
public <T> UpdateOperation<T> update(Drafted<T> draft) {
UpdateOperation update = new UpdateOperation<T>(this, draft.build());
Map<String, Object> map = draft.toMap();
HelenusEntity entity = draft.getEntity();
Set<String> mutatedProperties = draft.mutated();

@@ -427,28 +436,39 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return update;
}

public <V> UpdateOperation update(Getter<V> getter, V v) {
public <V> UpdateOperation<ResultSet> update(Getter<V> getter, V v) {
Objects.requireNonNull(getter, "field is empty");
Objects.requireNonNull(v, "value is empty");

HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);

return new UpdateOperation(this, p, v);
return new UpdateOperation<ResultSet>(this, p, v);
}

public <T> InsertOperation<T> insert() {
return new InsertOperation<T>(this, true);
public InsertOperation<ResultSet> insert() {
return new InsertOperation<ResultSet>(this, true);
}

public <T> InsertOperation<T> insert(Object pojo) {
public <T> InsertOperation<T> insert(Class<?> resultType) {
return new InsertOperation<T>(this, resultType, true);
}

public <T> InsertOperation<T> insert(T pojo) {
Objects.requireNonNull(pojo, "supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null;
try { entity = Helenus.resolve(pojo); } catch (HelenusMappingException e) {}
if (entity != null) {
return new InsertOperation<T>(this, entity.getMappingInterface(), true);
} else {
return this.<T>insert(pojo, null);
}
}

public <T> InsertOperation<T> insert(Drafted draft) {
return this.<T>insert(draft.build(), draft.mutated());
return this.<T>insert((T) draft.build(), draft.mutated());
}

public <T> InsertOperation<T> insert(Object pojo, Set<String> mutations) {
public <T> InsertOperation<T> insert(T pojo, Set<String> mutations) {
Objects.requireNonNull(pojo, "pojo is empty");

Class<?> iface = MappingUtil.getMappingInterface(pojo);

@@ -457,19 +477,30 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return new InsertOperation<T>(this, entity, pojo, mutations, true);
}

public <T> InsertOperation<T> upsert() {
return new InsertOperation<T>(this, false);
public InsertOperation<ResultSet> upsert() {
return new InsertOperation<ResultSet>(this, false);
}

public <T> InsertOperation<T> upsert(Class<?> resultType) {
return new InsertOperation<T>(this, resultType, false);
}

public <T> InsertOperation<T> upsert(Drafted draft) {
return this.<T>upsert(draft.build(), draft.mutated());
return this.<T>upsert((T) draft.build(), draft.mutated());
}

public <T> InsertOperation<T> upsert(Object pojo) {
public <T> InsertOperation<T> upsert(T pojo) {
Objects.requireNonNull(pojo, "supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null;
try { entity = Helenus.resolve(pojo); } catch (HelenusMappingException e) {}
if (entity != null) {
return new InsertOperation<T>(this, entity.getMappingInterface(), false);
} else {
return this.<T>upsert(pojo, null);
}
}

public <T> InsertOperation<T> upsert(Object pojo, Set<String> mutations) {
public <T> InsertOperation<T> upsert(T pojo, Set<String> mutations) {
Objects.requireNonNull(pojo, "pojo is empty");

Class<?> iface = MappingUtil.getMappingInterface(pojo);

@@ -487,11 +518,6 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return new DeleteOperation(this, Helenus.resolve(dsl));
}

@Override
public AbstractCache cacheFor(CacheManager.Type type) {
return cacheManager.of(type);
}

public Session getSession() {
return session;
}

@@ -539,4 +565,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
break;
}
}

public SessionCache getSessionCache() { return sessionCache; }

}
@@ -328,19 +328,6 @@ public final class SessionInitializer extends AbstractSessionOperations {
e -> {
action.accept(e);
});
/*
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();

sessionRepository.entities().stream()
.filter(e -> e.getType() == HelenusEntityType.UDT)
.collect(Collectors.toCollection(ArrayDeque::new))
.descendingIterator()
.forEachRemaining(e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
*/
}

private void eachUserTypeInRecursion(
@@ -2,14 +2,16 @@ package net.helenus.core;

import com.diffplug.common.base.Errors;
import com.google.common.collect.TreeTraverser;
import net.helenus.core.operation.AbstractCache;
import net.helenus.core.operation.UnitOfWorkCache;
import net.helenus.support.HelenusException;

import java.io.Closeable;
import java.io.IOException;
import java.util.*;

/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public final class UnitOfWork implements Closeable {
public final class UnitOfWork implements AutoCloseable {
private final List<UnitOfWork> nested = new ArrayList<>();
private final HelenusSession session;
private final UnitOfWork parent;

@@ -19,6 +21,7 @@ public final class UnitOfWork implements Closeable {

protected UnitOfWork(HelenusSession session, UnitOfWork parent) {
Objects.requireNonNull(session, "containing session cannot be null");

this.session = session;
this.parent = parent;
}

@@ -41,6 +44,10 @@ public final class UnitOfWork implements Closeable {
return this;
}

public UnitOfWorkCache getCacheEnclosing(AbstractCache cache) {
return new UnitOfWorkCache(this, cache);
}

private void applyPostCommitFunctions() {
if (!postCommit.isEmpty()) {
for (CommitThunk f : postCommit) {

@@ -111,7 +118,7 @@ public final class UnitOfWork implements Closeable {
}

@Override
public void close() throws IOException {
public void close() throws HelenusException {
// Closing a UnitOfWork will abort iff we've not already aborted or committed this unit of work.
if (aborted == false && committed == false) {
abort();
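Since UnitOfWork now implements AutoCloseable and close() throws the unchecked HelenusException instead of IOException, a unit of work can be driven directly by try-with-resources. A minimal sketch, assuming only the begin/commit/andThen/close API shown in this diff:

    try (UnitOfWork uow = session.begin()) {
        session.<Widget>upsert(widget)
            .value(widget::id, key)
            .value(widget::name, name)
            .sync(uow);
        uow.commit().andThen(() -> {
            // post-commit thunks run only after the unit of work commits
        });
    } // close() aborts automatically if neither commit() nor abort() was called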
@@ -4,37 +4,42 @@ import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public abstract class AbstractCache {
protected CacheManager.Type type;
protected Cache<String, ResultSet> cache;
public abstract class AbstractCache<K, V> {
final Logger logger = LoggerFactory.getLogger(getClass());
protected Cache<K, V> cache;

public AbstractCache(CacheManager.Type type, Cache<String, ResultSet> cache) {
this.type = type;
this.cache = cache;
public AbstractCache() {
RemovalListener<K, V> listener =
new RemovalListener<K, V>() {
@Override
public void onRemoval(RemovalNotification<K, V> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
logger.info(cause);
}
}
};

cache = CacheBuilder.newBuilder()
.maximumSize(10_000)
.expireAfterAccess(20, TimeUnit.MINUTES)
.weakKeys()
.softValues()
.removalListener(listener)
.build();
}

protected abstract ResultSet fetch(
protected abstract ResultSet apply(
Statement statement, OperationsDelegate delegate, ResultSetFuture resultSetFuture)
throws InterruptedException, ExecutionException;

protected abstract ResultSet mutate(
Statement statement, OperationsDelegate delegate, ResultSetFuture resultSetFuture)
throws InterruptedException, ExecutionException;

public ResultSet apply(
Statement statement, OperationsDelegate delegate, ResultSetFuture futureResultSet)
throws InterruptedException, ExecutionException {
ResultSet resultSet = null;
switch (type) {
case FETCH:
resultSet = fetch(statement, delegate, futureResultSet);
break;
case MUTATE:
resultSet = mutate(statement, delegate, futureResultSet);
break;
}
return resultSet;
}
}
@@ -34,9 +34,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe

public abstract Optional<E> transform(ResultSet resultSet);

protected AbstractCache getCache() {
return null;
}
protected AbstractCache getCache() { return null; }

public CacheKey getCacheKey() {
return null;
@@ -0,0 +1,8 @@
package net.helenus.core.operation;

import java.io.Serializable;

public interface CachableOperation {
public <T extends Serializable> T getCacheKey();
public <T extends Serializable> T valueToCache();
}
@@ -2,7 +2,9 @@ package net.helenus.core.operation;

import com.datastax.driver.core.Statement;

public class CacheKey {
import java.io.Serializable;

public class CacheKey implements Serializable {

private String key;

@@ -38,4 +40,5 @@ public class CacheKey {
public int hashCode() {
return key.hashCode();
}

}
@@ -1,53 +0,0 @@
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import java.util.concurrent.TimeUnit;
import net.helenus.core.HelenusSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CacheManager {
public enum Type {
FETCH,
MUTATE
}

final Logger logger = LoggerFactory.getLogger(getClass());
final HelenusSession session;

private AbstractCache sessionFetch;

public CacheManager(HelenusSession session) {
this.session = session;

RemovalListener<String, ResultSet> listener =
new RemovalListener<String, ResultSet>() {
@Override
public void onRemoval(RemovalNotification<String, ResultSet> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
logger.info(cause);
}
}
};

Cache<String, ResultSet> cache =
CacheBuilder.newBuilder()
.maximumSize(10_000)
.expireAfterAccess(20, TimeUnit.MINUTES)
.weakKeys()
.softValues()
.removalListener(listener)
.build();

sessionFetch = new SessionCache(Type.FETCH, this, cache);
}

public AbstractCache of(CacheManager.Type type) {
return sessionFetch;
}
}
@@ -59,6 +59,10 @@ public enum Executioner {
span = tracer.newChild(traceContext);
}

if (uow != null) {
cache = uow.getCacheEnclosing(cache);
}

try {
if (span != null) {
span.name("cassandra");
@@ -38,10 +38,10 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati

private HelenusEntity entity;

private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final T pojo;
private final Class<?> resultType;
private boolean ifNotExists;
private Object pojo;

private int[] ttl;
private long[] timestamp;

@@ -50,12 +50,22 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
super(sessionOperations);

this.ifNotExists = ifNotExists;
this.pojo = null;
this.resultType = ResultSet.class;
}

public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
super(sessionOperations);

this.ifNotExists = ifNotExists;
this.pojo = null;
this.resultType = resultType;
}

public InsertOperation(
AbstractSessionOperations sessionOperations,
HelenusEntity entity,
Object pojo,
T pojo,
Set<String> mutations,
boolean ifNotExists) {
super(sessionOperations);

@@ -63,6 +73,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.entity = entity;
this.pojo = pojo;
this.ifNotExists = ifNotExists;
this.resultType = pojo.getClass() == null ? ResultSet.class : pojo.getClass();

Collection<HelenusProperty> properties = entity.getOrderedProperties();
Set<String> keys = (mutations == null) ? null : mutations;

@@ -141,51 +153,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati

@Override
public T transform(ResultSet resultSet) {
if (pojo != null && ((T) pojo).getClass().isAssignableFrom(ResultSet.class)) {
return (T) pojo;
} else {
if (values.size() > 0) {
Collection<HelenusProperty> properties = entity.getOrderedProperties();
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());

// First, add all the inserted values into our new map.
values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));

// Then, fill in all the rest of the properties.
for (HelenusProperty prop : properties) {
String key = prop.getPropertyName();
if (backingMap.containsKey(key)) {
// Some values man need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter =
prop.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key)));
}
} else {
// If we started this operation with an instance of this type, use values from that.
if (pojo != null) {
backingMap.put(key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
} else {
// Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType();
if (propType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
if (type == null) {
throw new HelenusException("unknown primitive type " + propType);
}
backingMap.put(key, type.getDefaultValue());
}
}
}
}

// Lastly, create a new proxy object for the entity and return the new instance.
Class<?> iface = entity.getMappingInterface();
pojo = Helenus.map(iface, backingMap);
if (resultType.isAssignableFrom(iface)) {
return TransformGeneric.INSTANCE.<T>transform(sessionOps, pojo, iface, values, entity.getOrderedProperties());
}
}
return (T) pojo;
return pojo;
}

public InsertOperation<T> usingTtl(int ttl) {
@@ -45,6 +45,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
protected List<Ordering> ordering = null;
protected Integer limit = null;
protected boolean allowFiltering = false;
protected boolean ignoreSessionCache = false;

public SelectOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);

@@ -165,6 +166,18 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
return this;
}

protected AbstractCache getCache() {
if (!ignoreSessionCache) {
return sessionOps.getSessionCache();
}
return null;
}

public SelectOperation<E> ignoreCache() {
ignoreSessionCache = true;
return this;
}

public SelectOperation<E> limit(Integer limit) {
this.limit = limit;
return this;
@@ -6,16 +6,9 @@ import com.datastax.driver.core.Statement;
import com.google.common.cache.Cache;
import java.util.concurrent.ExecutionException;

public class SessionCache extends AbstractCache {
public class SessionCache extends AbstractCache<String, ResultSet> {

private final CacheManager manager;

SessionCache(CacheManager.Type type, CacheManager manager, Cache<String, ResultSet> cache) {
super(type, cache);
this.manager = manager;
}

protected ResultSet fetch(
protected ResultSet apply(
Statement statement, OperationsDelegate delegate, ResultSetFuture resultSetFuture)
throws InterruptedException, ExecutionException {
final CacheKey key = delegate.getCacheKey();

@@ -28,7 +21,6 @@ public class SessionCache extends AbstractCache {
if (resultSet == null) {
resultSet = resultSetFuture.get();
if (resultSet != null) {
planEvictionFor(statement);
cache.put(cacheKey, resultSet);
}
}

@@ -36,25 +28,6 @@ public class SessionCache extends AbstractCache {
return resultSet;
}

protected ResultSet mutate(
Statement statement, OperationsDelegate delegate, ResultSetFuture resultSetFuture)
throws InterruptedException, ExecutionException {
CacheKey key = delegate.getCacheKey();
final String cacheKey = key == null ? statement.toString() : key.toString();
ResultSet resultSet = resultSetFuture.get();
if (cacheKey != null && resultSet != null) {
planEvictionFor(statement);
//manager.evictIfNecessary(statement, delegate);
cache.put(cacheKey, resultSet);
}
return resultSet;
}

private void planEvictionFor(Statement statement) {
//((Select)statement).table + statement.where.clauses.length == 0
//TTL for rows read
}

public ResultSet get(Statement statement, OperationsDelegate delegate) {
final CacheKey key = delegate.getCacheKey();
final String cacheKey = (key == null) ? CacheKey.of(statement) : key.toString();
@@ -0,0 +1,59 @@
package net.helenus.core.operation;

import java.util.*;
import java.util.function.Function;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.Fun;
import net.helenus.support.HelenusException;

public enum TransformGeneric {
INSTANCE;

public <T> T transform(AbstractSessionOperations sessionOps, T pojo, Class<?> mappingInterface, List<Fun.Tuple2<HelenusPropertyNode, Object>> assignments, Collection<HelenusProperty> properties) {
if (assignments.size() > 0) {
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());

// First, add all the inserted values into our new map.
assignments.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));

// Then, fill in all the rest of the properties.
for (HelenusProperty prop : properties) {
String key = prop.getPropertyName();
if (backingMap.containsKey(key)) {
// Some values man need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter =
prop.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key)));
}
} else {
// If we started this operation with an instance of this type, use values from that.
if (pojo != null) {
backingMap.put(key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
} else {
// Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType();
if (propType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
if (type == null) {
throw new HelenusException("unknown primitive type " + propType);
}
backingMap.put(key, type.getDefaultValue());
}
}
}
}

// Lastly, create a new proxy object for the entity and return the new instance.
return (T) Helenus.map(mappingInterface, backingMap);
}
return (T) pojo;
}
}
@@ -0,0 +1,51 @@
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import net.helenus.core.UnitOfWork;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

public class UnitOfWorkCache extends AbstractCache<String, ResultSet> {

private final UnitOfWork uow;
private final Map<String, ResultSet> cache = new HashMap<String, ResultSet>();
private AbstractCache sessionCache;

public UnitOfWorkCache(UnitOfWork uow, AbstractCache sessionCache) {
super();
this.sessionCache = sessionCache;
this.uow = uow;
}

@Override
protected ResultSet apply(Statement statement, OperationsDelegate delegate, ResultSetFuture resultSetFuture)
throws InterruptedException, ExecutionException {

final CacheKey key = delegate.getCacheKey();
final String cacheKey = (key == null) ? CacheKey.of(statement) : key.toString();
ResultSet resultSet = null;
if (cacheKey == null) {
if (sessionCache != null) {
ResultSet rs = sessionCache.apply(statement, delegate, resultSetFuture);
if (rs != null) {
return rs;
}
}
} else {
resultSet = cache.get(cacheKey);
if (resultSet != null) {
return resultSet;
}
}
resultSet = resultSetFuture.get();
if (resultSet != null) {
cache.put(cacheKey, resultSet);
}
return resultSet;
}

}
@@ -32,22 +32,29 @@ import net.helenus.mapping.MappingUtil;
import net.helenus.support.HelenusMappingException;
import net.helenus.support.Immutables;

public final class UpdateOperation extends AbstractFilterOperation<ResultSet, UpdateOperation> {
public final class UpdateOperation<T> extends AbstractFilterOperation<T, UpdateOperation<T>> {

private HelenusEntity entity = null;

private final List<Assignment> assignments = new ArrayList<Assignment>();
private final T pojo;

private int[] ttl;
private long[] timestamp;

public UpdateOperation(AbstractSessionOperations sessionOperations){
super(sessionOperations);
this.pojo = null;
}

public UpdateOperation(
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
public UpdateOperation(AbstractSessionOperations sessionOperations, T pojo) {
super(sessionOperations);
this.pojo = pojo;
}

public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
super(sessionOperations);
this.pojo = null;

Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
assignments.add(QueryBuilder.set(p.getColumnName(), value));

@@ -55,7 +62,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
addPropertyNode(p);
}

public <V> UpdateOperation set(Getter<V> getter, V v) {
public <V> UpdateOperation<T> set(Getter<V> getter, V v) {
Objects.requireNonNull(getter, "getter is empty");

HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);

@@ -76,11 +83,11 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
*
*/

public <V> UpdateOperation increment(Getter<V> counterGetter) {
public <V> UpdateOperation<T> increment(Getter<V> counterGetter) {
return increment(counterGetter, 1L);
}

public <V> UpdateOperation increment(Getter<V> counterGetter, long delta) {
public <V> UpdateOperation<T> increment(Getter<V> counterGetter, long delta) {

Objects.requireNonNull(counterGetter, "counterGetter is empty");

@@ -92,11 +99,11 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation decrement(Getter<V> counterGetter) {
public <V> UpdateOperation<T> decrement(Getter<V> counterGetter) {
return decrement(counterGetter, 1L);
}

public <V> UpdateOperation decrement(Getter<V> counterGetter, long delta) {
public <V> UpdateOperation<T> decrement(Getter<V> counterGetter, long delta) {

Objects.requireNonNull(counterGetter, "counterGetter is empty");

@@ -115,7 +122,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
*
*/

public <V> UpdateOperation prepend(Getter<List<V>> listGetter, V value) {
public <V> UpdateOperation<T> prepend(Getter<List<V>> listGetter, V value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -129,7 +136,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation prependAll(Getter<List<V>> listGetter, List<V> value) {
public <V> UpdateOperation<T> prependAll(Getter<List<V>> listGetter, List<V> value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -143,7 +150,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation setIdx(Getter<List<V>> listGetter, int idx, V value) {
public <V> UpdateOperation<T> setIdx(Getter<List<V>> listGetter, int idx, V value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -157,7 +164,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation append(Getter<List<V>> listGetter, V value) {
public <V> UpdateOperation<T> append(Getter<List<V>> listGetter, V value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -171,7 +178,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation appendAll(Getter<List<V>> listGetter, List<V> value) {
public <V> UpdateOperation<T> appendAll(Getter<List<V>> listGetter, List<V> value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -185,7 +192,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation discard(Getter<List<V>> listGetter, V value) {
public <V> UpdateOperation<T> discard(Getter<List<V>> listGetter, V value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -199,7 +206,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation discardAll(Getter<List<V>> listGetter, List<V> value) {
public <V> UpdateOperation<T> discardAll(Getter<List<V>> listGetter, List<V> value) {

Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -251,7 +258,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
*
*/

public <V> UpdateOperation add(Getter<Set<V>> setGetter, V value) {
public <V> UpdateOperation<T> add(Getter<Set<V>> setGetter, V value) {

Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -265,7 +272,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation addAll(Getter<Set<V>> setGetter, Set<V> value) {
public <V> UpdateOperation<T> addAll(Getter<Set<V>> setGetter, Set<V> value) {

Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -279,7 +286,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation remove(Getter<Set<V>> setGetter, V value) {
public <V> UpdateOperation<T> remove(Getter<Set<V>> setGetter, V value) {

Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -293,7 +300,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <V> UpdateOperation removeAll(Getter<Set<V>> setGetter, Set<V> value) {
public <V> UpdateOperation<T> removeAll(Getter<Set<V>> setGetter, Set<V> value) {

Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");

@@ -344,7 +351,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
*
*/

public <K, V> UpdateOperation put(Getter<Map<K, V>> mapGetter, K key, V value) {
public <K, V> UpdateOperation<T> put(Getter<Map<K, V>> mapGetter, K key, V value) {

Objects.requireNonNull(mapGetter, "mapGetter is empty");
Objects.requireNonNull(key, "key is empty");

@@ -368,7 +375,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return this;
}

public <K, V> UpdateOperation putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {
public <K, V> UpdateOperation<T> putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {

Objects.requireNonNull(mapGetter, "mapGetter is empty");
Objects.requireNonNull(map, "map is empty");

@@ -419,6 +426,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
if (this.ttl != null) {
update.using(QueryBuilder.ttl(this.ttl[0]));
}

if (this.timestamp != null) {
update.using(QueryBuilder.timestamp(this.timestamp[0]));
}

@@ -427,17 +435,17 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
}

@Override
public ResultSet transform(ResultSet resultSet) {
return resultSet;
public T transform(ResultSet resultSet) {
return (T) resultSet;
}

public UpdateOperation usingTtl(int ttl) {
public UpdateOperation<T> usingTtl(int ttl) {
this.ttl = new int[1];
this.ttl[0] = ttl;
return this;
}

public UpdateOperation usingTimestamp(long timestamp) {
public UpdateOperation<T> usingTimestamp(long timestamp) {
this.timestamp = new long[1];
this.timestamp[0] = timestamp;
return this;
@@ -67,8 +67,11 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

// read full object

Customer actual =
session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
Customer actual = session.<Customer>select(customer)
.where(customer::id, eq(id))
.single()
.sync()
.orElse(null);
Assert.assertEquals(id, actual.id());
Assert.assertEquals(aliases, actual.aliases());
Assert.assertNull(actual.names());

@@ -87,7 +90,12 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

session.update().set(customer::aliases, expected).where(customer::id, eq(id)).sync();

actual = session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
actual = session.<Customer>select(customer)
.where(customer::id, eq(id))
.single()
.sync()
.orElse(null);

Assert.assertEquals(id, actual.id());
Assert.assertEquals(expected, actual.aliases());

@@ -162,8 +170,12 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

// read full object

Customer actual =
session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
Customer actual = session.<Customer>select(customer)
.where(customer::id, eq(id))
.single()
.sync()
.orElse(null);

Assert.assertEquals(id, actual.id());
Assert.assertEquals(names, actual.names());
Assert.assertNull(actual.aliases());

@@ -188,7 +200,12 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

session.update().set(customer::names, expected).where(customer::id, eq(id)).sync();

actual = session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
actual = session.<Customer>select(customer)
.where(customer::id, eq(id))
.single()
.sync()
.orElse(null);

Assert.assertEquals(id, actual.id());
Assert.assertEquals(expected, actual.names());

@@ -289,8 +306,11 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

// read full object

Customer actual =
session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
Customer actual = session.<Customer>select(customer)
.where(customer::id, eq(id)).single()
.sync()
.orElse(null);

Assert.assertEquals(id, actual.id());
Assert.assertEquals(props, actual.properties());
Assert.assertNull(actual.aliases());

@@ -323,7 +343,11 @@ public class CollectionTest extends AbstractEmbeddedCassandraTest {

session.update().set(customer::properties, expected).where(customer::id, eq(id)).sync();

actual = session.select(Customer.class).where(customer::id, eq(id)).sync().findFirst().get();
actual = session.<Customer>select(customer)
.where(customer::id, eq(id))
.single()
.sync()
.orElse(null);
Assert.assertEquals(id, actual.id());
Assert.assertEquals(expected, actual.properties());
@@ -51,7 +51,7 @@ public class HierarchyTest extends AbstractEmbeddedCassandraTest {
.sync();

Optional<Cat> animal =
session.select(Cat.class).where(cat::nickname, eq("garfield")).sync().findFirst();
session.<Cat>select(Cat.class).where(cat::nickname, eq("garfield")).sync().findFirst();
Assert.assertTrue(animal.isPresent());
Assert.assertFalse(animal.get().eatable());
}

@@ -64,8 +64,7 @@ public class HierarchyTest extends AbstractEmbeddedCassandraTest {
.value(cat::nickname, "garfield")
.value(cat::eatable, false)
.sync();
Optional<Cat> animal =
session.select(Cat.class).where(cat::nickname, eq("garfield")).sync().findFirst();
Optional<Cat> animal = session.select(Cat.class).where(cat::nickname, eq("garfield")).single().sync();
Assert.assertTrue(animal.isPresent());

Cat cat = animal.get();
@@ -70,7 +70,7 @@ public class PreparedStatementTest extends AbstractEmbeddedCassandraTest {

selectOp =
session
.select(Car.class)
.<Car>select(car)
.where(car::make, Query.eq(Query.marker()))
.and(car::model, Query.eq(Query.marker()))
.prepare();
@@ -28,10 +28,7 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.*;

public class SimpleUserTest extends AbstractEmbeddedCassandraTest {

@@ -103,7 +100,11 @@ public class SimpleUserTest extends AbstractEmbeddedCassandraTest {

// select as object

actual = session.select(User.class).where(user::id, eq(100L)).sync().findFirst().get();
actual = session.<User>select(user)
.where(user::id, eq(100L))
.single()
.sync()
.orElse(null);
assertUsers(newUser, actual);

// select by columns

@@ -217,22 +218,35 @@ public class SimpleUserTest extends AbstractEmbeddedCassandraTest {

Assert.assertEquals("_albert", name);

User u = session.select(User.class).where(user::id, eq(100L)).sync().findFirst().get();
User u = session.<User>select(user)
.where(user::id, eq(100L))
.single()
.sync()
.orElse(null);

Assert.assertEquals(Long.valueOf(100L), u.id());
Assert.assertEquals("albert", u.name());
Assert.assertEquals(Integer.valueOf(35), u.age());

// INSERT
//
User greg =
session
.<User>insert()
.<User>insert(user)
.value(user::name, "greg")
.value(user::age, 44)
.value(user::type, UserType.USER)
.value(user::id, 1234L)
.sync();

Optional<User> maybeGreg =
session
.<User>select(user)
.where(user::id, eq(1234L))
.single()
.sync();

// INSERT

session
.update()
.set(user::name, null)
@@ -104,7 +104,7 @@ public class StaticColumnTest extends AbstractEmbeddedCassandraTest {

List<Message> actual =
session
.select(Message.class)
.<Message>select(message)
.where(message::id, Query.eq(123))
.sync()
.collect(Collectors.toList());
@@ -40,7 +40,7 @@ public class TupleKeyMapTest extends TupleCollectionTest {

// read full object

Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(notes, actual.notes());
Assert.assertNull(actual.reviewers());

@@ -71,7 +71,7 @@ public class TupleKeyMapTest extends TupleCollectionTest {

session.update().set(book::notes, expected).where(book::id, Query.eq(id)).sync();

actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(expected, actual.notes());
@@ -42,7 +42,11 @@ public class TupleMapTest extends TupleCollectionTest {

// read full object

Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
Book actual = session.<Book>select(book)
.where(book::id, Query.eq(id))
.single()
.sync()
.orElse(null);
Assert.assertEquals(id, actual.id());
assertEqualMaps(writers, actual.writers());
Assert.assertNull(actual.reviewers());

@@ -73,7 +77,12 @@ public class TupleMapTest extends TupleCollectionTest {

session.update().set(book::writers, expected).where(book::id, Query.eq(id)).sync();

actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
actual = session.<Book>select(book)
.where(book::id, Query.eq(id))
.single()
.sync()
.orElse(null);

Assert.assertEquals(id, actual.id());
assertEqualMaps(expected, actual.writers());

@@ -87,8 +96,7 @@ public class TupleMapTest extends TupleCollectionTest {
expected.put(third, unk);
session.update().put(book::writers, third, unk).where(book::id, Query.eq(id)).sync();

actualMap =
session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
assertEqualMaps(expected, actualMap);

// putAll operation
@@ -42,7 +42,7 @@ public class UDTKeyMapTest extends UDTCollectionTest {

// read full object

Book actual = session.select(Book.class).where(book::id, eq(id)).sync().findFirst().get();
Book actual = session.<Book>select(book).where(book::id, eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(notes, actual.notes());
Assert.assertNull(actual.reviewers());

@@ -70,7 +70,7 @@ public class UDTKeyMapTest extends UDTCollectionTest {

session.update().set(book::notes, expected).where(book::id, eq(id)).sync();

actual = session.select(Book.class).where(book::id, eq(id)).sync().findFirst().get();
actual = session.<Book>select(book).where(book::id, eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(expected, actual.notes());
@@ -40,7 +40,7 @@ public class UDTListTest extends UDTCollectionTest {

// read full object

Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualLists(authors, actual.authors());
Assert.assertNull(actual.reviewers());

@@ -65,7 +65,7 @@ public class UDTListTest extends UDTCollectionTest {

session.update().set(book::authors, expected).where(book::id, Query.eq(id)).sync();

actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualLists(expected, actual.authors());
@@ -40,7 +40,7 @@ public class UDTMapTest extends UDTCollectionTest {

// read full object

Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(writers, actual.writers());
Assert.assertNull(actual.reviewers());

@@ -71,7 +71,7 @@ public class UDTMapTest extends UDTCollectionTest {

session.update().set(book::writers, expected).where(book::id, Query.eq(id)).sync();

actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
Assert.assertEquals(id, actual.id());
assertEqualMaps(expected, actual.writers());
@@ -15,14 +15,9 @@
*/
package net.helenus.test.integration.core.unitofwork;

import com.datastax.driver.core.utils.UUIDs;
import net.bytebuddy.utility.RandomString;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert;
import org.junit.BeforeClass;

@@ -31,30 +26,18 @@ import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

@Table
interface Widget {
@PartitionKey UUID id();
@Column String name();
}

public class AndThenOrderTest extends AbstractEmbeddedCassandraTest {

static Widget widgets;

static HelenusSession session;

@BeforeClass
public static void beforeTest() {
session = Helenus.init(getSession())
.showCql()
.add(Widget.class)
.autoCreateDrop()
.get();
widgets = session.dsl(Widget.class);
}

@Test
@@ -15,38 +15,96 @@
*/
package net.helenus.test.integration.core.unitofwork;

import com.datastax.driver.core.utils.UUIDs;
import net.bytebuddy.utility.RandomString;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import net.helenus.test.integration.core.unitofwork.FilesystemNode;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.UUID;

import static net.helenus.core.Query.eq;

@Table
@Cacheable
interface Widget {
@PartitionKey
UUID id();
@Column
String name();
}

public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {

static FilesystemNode node;

static Widget widget;
static HelenusSession session;

@BeforeClass
public static void beforeTest() {
session = Helenus.init(getSession())
.showCql()
.add(FilesystemNode.class)
.add(Widget.class)
.autoCreateDrop()
.get();
node = session.dsl(FilesystemNode.class);
widget = session.dsl(Widget.class);
}

/*
@Test
public void testCruid() throws Exception {
session.insert()
.value(widgets::id, UUIDs.timeBased())
.value(widgets::name, RandomString.make(20))
.sync(uow5);
public void testSelectAfterInsertProperlyCachesEntity() throws Exception {
Widget w1, w2, w3, w4;
UUID key = UUIDs.timeBased();

try (UnitOfWork uow = session.begin()) {

// This should cache the inserted Widget.
w1 = session.<Widget>upsert(widget)
.value(widget::id, key)
.value(widget::name, RandomString.make(20))
.sync(uow);

// This should read from the cache and get the same instance of a Widget.
w2 = session.<Widget>select(widget)
.where(widget::id, eq(key))
.single()
.sync(uow)
.orElse(null);

uow.commit()
.andThen(() -> {
Assert.assertEquals(w1, w2);
});
}
*/

// This should read the widget from the session cache and maintain object identity.
w3 = session.<Widget>select(widget)
.where(widget::id, eq(key))
.single()
.sync()
.orElse(null);

Assert.assertEquals(w1, w3);

// This should read the widget from the database, no object identity but values should match.
w4 = session.<Widget>select(widget)
.where(widget::id, eq(key))
.ignoreCache()
.single()
.sync()
.orElse(null);

Assert.assertNotEquals(w1, w4);
Assert.assertTrue(w1.equals(w4));
}

}