Merge branch 'develop'

Commit 9675c10772: 116 changed files with 4576 additions and 3959 deletions
@@ -1,9 +1,9 @@
package net.helenus.core;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.io.Serializable;
import java.util.*;

import org.apache.commons.lang3.SerializationUtils;

import com.google.common.primitives.Primitives;

@@ -30,12 +30,12 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
}

@SuppressWarnings("unchecked")
protected <T> T get(Getter<T> getter, Class<?> returnType) {
public <T> T get(Getter<T> getter, Class<?> returnType) {
return (T) get(this.<T>methodNameFor(getter), returnType);
}

@SuppressWarnings("unchecked")
protected <T> T get(String key, Class<?> returnType) {
public <T> T get(String key, Class<?> returnType) {
T value = (T) backingMap.get(key);

if (value == null) {

@@ -51,17 +51,27 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {

return (T) type.getDefaultValue();
}
} else {
// Collections fetched from the entityMap
if (value instanceof Collection) {
try {
value = MappingUtil.<T>clone(value);
} catch (CloneNotSupportedException e) {
// TODO(gburd): deep?shallow? copy of List, Map, Set to a mutable collection.
value = (T) SerializationUtils.<Serializable>clone((Serializable) value);
}
}
}
}

return value;
}

protected <T> Object set(Getter<T> getter, Object value) {
public <T> Object set(Getter<T> getter, Object value) {
return set(this.<T>methodNameFor(getter), value);
}

protected Object set(String key, Object value) {
public Object set(String key, Object value) {
if (key == null || value == null) {
return null;
}

@@ -71,11 +81,11 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
}

@SuppressWarnings("unchecked")
protected <T> T mutate(Getter<T> getter, T value) {
public <T> T mutate(Getter<T> getter, T value) {
return (T) mutate(this.<T>methodNameFor(getter), value);
}

protected Object mutate(String key, Object value) {
public Object mutate(String key, Object value) {
Objects.requireNonNull(key);

if (value == null) {
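The draft accessors above move from protected to public, so calling code can read and write draft state directly instead of going through generated subclasses. A minimal sketch of what that enables, assuming a hypothetical Account entity whose draft extends AbstractEntityDraft (the entity, property name, and draft factory are illustrative, not part of this commit):

// Hypothetical draft; only the get/set/mutate signatures come from this diff.
AbstractEntityDraft<Account> draft = account.mutate();
String before = draft.get("name", String.class); // now callable from outside the draft class
draft.set("name", "Alice");
draft.mutate("name", "Alice B.");                // records the change for a later UPDATE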
@@ -24,14 +24,16 @@ import org.slf4j.LoggerFactory;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Table;
import com.google.common.util.concurrent.ListenableFuture;

import brave.Tracer;
import net.helenus.core.cache.Facet;
import net.helenus.core.operation.Operation;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.Either;
import net.helenus.support.HelenusException;

public abstract class AbstractSessionOperations {

@@ -60,7 +62,7 @@ public abstract class AbstractSessionOperations {

public PreparedStatement prepare(RegularStatement statement) {
try {
log(statement, false);
logStatement(statement, false);
return currentSession().prepare(statement);
} catch (RuntimeException e) {
throw translateException(e);

@@ -69,7 +71,7 @@ public abstract class AbstractSessionOperations {

public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
try {
log(statement, false);
logStatement(statement, false);
return currentSession().prepareAsync(statement);
} catch (RuntimeException e) {
throw translateException(e);

@@ -77,37 +79,47 @@ public abstract class AbstractSessionOperations {
}

public ResultSet execute(Statement statement, boolean showValues) {
return executeAsync(statement, showValues).getUninterruptibly();
return execute(statement, null, null, showValues);
}

public ResultSet execute(Statement statement, Stopwatch timer, boolean showValues) {
return execute(statement, null, timer, showValues);
}

public ResultSet execute(Statement statement, UnitOfWork uow, boolean showValues) {
return execute(statement, uow, null, showValues);
}

public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
return executeAsync(statement, uow, timer, showValues).getUninterruptibly();
}

public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
return executeAsync(statement, null, null, showValues);
}

public ResultSetFuture executeAsync(Statement statement, Stopwatch timer, boolean showValues) {
return executeAsync(statement, null, timer, showValues);
}

public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, boolean showValues) {
return executeAsync(statement, uow, null, showValues);
}

public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
try {
log(statement, showValues);
logStatement(statement, showValues);
return currentSession().executeAsync(statement);
} catch (RuntimeException e) {
throw translateException(e);
}
}

void log(Statement statement, boolean showValues) {
if (LOG.isInfoEnabled()) {
LOG.info("Execute statement " + statement);
}
private void logStatement(Statement statement, boolean showValues) {
if (isShowCql()) {
if (statement instanceof BuiltStatement) {
BuiltStatement builtStatement = (BuiltStatement) statement;
if (showValues) {
RegularStatement regularStatement = builtStatement.setForceNoValues(true);
printCql(regularStatement.getQueryString());
} else {
printCql(builtStatement.getQueryString());
}
} else if (statement instanceof RegularStatement) {
RegularStatement regularStatement = (RegularStatement) statement;
printCql(regularStatement.getQueryString());
} else {
printCql(statement.toString());
}
printCql(Operation.queryString(statement, showValues));
} else if (LOG.isInfoEnabled()) {
LOG.info("CQL> " + Operation.queryString(statement, showValues));
}
}

@@ -119,7 +131,7 @@ public abstract class AbstractSessionOperations {
return null;
}

public void mergeCache(Table<String, String, Object> cache) {
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
}

RuntimeException translateException(RuntimeException e) {

@@ -139,4 +151,7 @@ public abstract class AbstractSessionOperations {
void printCql(String cql) {
getPrintStream().println(cql);
}

public void cacheEvict(List<Facet> facets) {
}
}
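The execute/executeAsync overloads above all funnel into a single four-argument form that threads an optional UnitOfWork and Stopwatch through to the driver. A rough sketch of a caller, assuming ops is any concrete AbstractSessionOperations, stmt a driver Statement, and uow an active UnitOfWork:

Stopwatch timer = Stopwatch.createStarted();
ResultSet rows = ops.execute(stmt, uow, timer, false /* showValues */);
timer.stop();
uow.addDatabaseTime("select", timer); // feeds the per-statement timing map added to AbstractUnitOfWork below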
@@ -15,8 +15,11 @@
*/
package net.helenus.core;

import static net.helenus.core.HelenusSession.deleted;

import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -29,6 +32,7 @@ import com.google.common.collect.TreeTraverser;

import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.support.Either;

/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {

@@ -38,15 +42,18 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
private final HelenusSession session;
private final AbstractUnitOfWork<E> parent;
// Cache:
private final Table<String, String, Object> cache = HashBasedTable.create();
private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
protected String purpose;
protected List<String> nestedPurposes = new ArrayList<String>();
protected int cacheHits = 0;
protected int cacheMisses = 0;
protected int databaseLookups = 0;
protected Stopwatch elapsedTime;
protected Map<String, Double> databaseTime = new HashMap<>();
protected double cacheLookupTime = 0.0;
private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
private boolean aborted = false;
private boolean committed = false;
private String purpose_;
private Stopwatch elapsedTime_;
private Stopwatch databaseTime_ = Stopwatch.createUnstarted();
private Stopwatch cacheLookupTime_ = Stopwatch.createUnstarted();

protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
Objects.requireNonNull(session, "containing session cannot be null");

@@ -56,13 +63,18 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
}

@Override
public Stopwatch getExecutionTimer() {
return databaseTime_;
public void addDatabaseTime(String name, Stopwatch amount) {
Double time = databaseTime.get(name);
if (time == null) {
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
} else {
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
}
}

@Override
public Stopwatch getCacheLookupTimer() {
return cacheLookupTime_;
public void addCacheLookupTime(Stopwatch amount) {
cacheLookupTime += amount.elapsed(TimeUnit.MICROSECONDS);
}

@Override
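addDatabaseTime and addCacheLookupTime replace the old Stopwatch getters: instead of handing out timers, the unit of work now accumulates elapsed microseconds, keyed by statement name for database time. A sketch of the intended call pattern (illustrative, not from the diff):

Stopwatch sw = Stopwatch.createStarted();
// ... run a statement the caller labels "SELECT user" ...
sw.stop();
uow.addDatabaseTime("SELECT user", sw); // first call stores the elapsed micros
uow.addDatabaseTime("SELECT user", sw); // later calls add to the same entry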
@ -73,26 +85,72 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
|
|||
}
|
||||
|
||||
@Override
|
||||
public UnitOfWork<E> begin() {
|
||||
elapsedTime_ = Stopwatch.createStarted();
|
||||
public synchronized UnitOfWork<E> begin() {
|
||||
if (LOG.isInfoEnabled()) {
|
||||
elapsedTime = Stopwatch.createStarted();
|
||||
}
|
||||
// log.record(txn::start)
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPurpose() {
|
||||
return purpose;
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnitOfWork setPurpose(String purpose) {
|
||||
purpose_ = purpose;
|
||||
this.purpose = purpose;
|
||||
return this;
|
||||
}
|
||||
|
||||
public void logTimers(String what) {
|
||||
double e = (double) elapsedTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double d = (double) databaseTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double c = (double) cacheLookupTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double fd = (d / (e - c)) * 100.0;
|
||||
double fc = (c / (e - d)) * 100.0;
|
||||
LOG.info(String.format("UOW(%s)%s %s (total: %.3fms cache: %.3fms %2.2f%% db: %.3fms %2.2f%%)", hashCode(),
|
||||
(purpose_ == null ? "" : " " + purpose_), what, e, c, fc, d, fd));
|
||||
@Override
|
||||
public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
|
||||
if (cache > 0) {
|
||||
cacheHits += cache;
|
||||
} else {
|
||||
cacheMisses += Math.abs(cache);
|
||||
}
|
||||
if (ops > 0) {
|
||||
databaseLookups += ops;
|
||||
}
|
||||
}
|
||||
|
||||
public String logTimers(String what) {
|
||||
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double d = 0.0;
|
||||
double c = cacheLookupTime / 1000.0;
|
||||
double fc = (c / e) * 100.0;
|
||||
String database = "";
|
||||
if (databaseTime.size() > 0) {
|
||||
List<String> dbt = new ArrayList<>(databaseTime.size());
|
||||
for (String name : databaseTime.keySet()) {
|
||||
double t = databaseTime.get(name) / 1000.0;
|
||||
d += t;
|
||||
dbt.add(String.format("%s took %,.3fms %,2.2f%%", name, t, (t / e) * 100.0));
|
||||
}
|
||||
double fd = (d / e) * 100.0;
|
||||
database = String.format(", %d quer%s (%,.3fms %,2.2f%% - %s)", databaseLookups,
|
||||
(databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
|
||||
}
|
||||
String cache = "";
|
||||
if (cacheLookupTime > 0) {
|
||||
int cacheLookups = cacheHits + cacheMisses;
|
||||
cache = String.format(" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)", cacheLookups,
|
||||
cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
|
||||
}
|
||||
String da = "";
|
||||
if (databaseTime.size() > 0 || cacheLookupTime > 0) {
|
||||
double dat = d + c;
|
||||
double daf = (dat / e) * 100;
|
||||
da = String.format(" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
|
||||
}
|
||||
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
|
||||
String n = nested.stream().map(uow -> String.valueOf(uow.hashCode())).collect(Collectors.joining(", "));
|
||||
String s = String.format(Locale.US, "UOW(%s%s) %s in %,.3fms%s%s%s%s%s", hashCode(),
|
||||
(nested.size() > 0 ? ", [" + n + "]" : ""), what, e, cache, database, da,
|
||||
(purpose == null ? "" : " " + purpose), (nestedPurposes.isEmpty()) ? "" : ", " + x);
|
||||
return s;
|
||||
}
|
||||
|
||||
private void applyPostCommitFunctions() {
|
||||
|
@@ -101,7 +159,9 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
f.apply();
}
}
logTimers("committed");
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("committed"));
}
}

@Override
@@ -111,15 +171,14 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Object value = cache.get(tableName, columnName);
if (value != null) {
if (result.isPresent() && result.get() != value) {
// One facet matched, but another did not.
result = Optional.empty();
break;
} else {
result = Optional.of(value);
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
if (eitherValue != null) {
Object value = deleted;
if (eitherValue.isLeft()) {
value = eitherValue.getLeft();
}
result = Optional.of(value);
break;
}
}
}
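The unit-of-work cache now stores Either values: Either.left(value) is a live object, while Either.right(facets) is a tombstone written by cacheEvict, which cacheLookup surfaces as the shared `deleted` sentinel. A sketch of the convention, using the same Either factories this diff uses elsewhere (the keys are illustrative):

cache.put("user", "id==42", Either.left(user));    // live entry, as written by cacheUpdate()
cache.put("user", "id==42", Either.right(facets)); // tombstone, as written by cacheEvict()
// cacheLookup() then returns Optional.of(deleted) for the tombstoned key instead of a stale object.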
@ -132,13 +191,46 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> cacheEvict(List<Facet> facets) {
|
||||
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Optional<Object> optionalValue = cacheLookup(facets);
|
||||
if (optionalValue.isPresent()) {
|
||||
Object value = optionalValue.get();
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnKey = facet.name() + "==" + facet.value();
|
||||
// mark the value identified by the facet to `deleted`
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
}
|
||||
}
|
||||
// look for other row/col pairs that referenced the same object, mark them
|
||||
// `deleted`
|
||||
cache.columnKeySet().forEach(columnKey -> {
|
||||
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
|
||||
if (eitherCachedValue.isLeft()) {
|
||||
Object cachedValue = eitherCachedValue.getLeft();
|
||||
if (cachedValue == value) {
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
String[] parts = columnKey.split("==");
|
||||
facets.add(new Facet<String>(parts[0], parts[1]));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return facets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cacheUpdate(Object value, List<Facet> facets) {
|
||||
Facet table = facets.remove(0);
|
||||
String tableName = table.value().toString();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
for (Facet facet : facets) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
cache.put(tableName, columnName, value);
|
||||
if (!facet.fixed()) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
cache.put(tableName, columnName, Either.left(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -178,21 +270,37 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
|
|||
aborted = false;
|
||||
|
||||
nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
|
||||
elapsedTime.stop();
|
||||
|
||||
// Merge UOW cache into parent's cache.
|
||||
if (parent != null) {
|
||||
parent.mergeCache(cache);
|
||||
} else {
|
||||
session.mergeCache(cache);
|
||||
}
|
||||
elapsedTime_.stop();
|
||||
|
||||
// Apply all post-commit functions for
|
||||
if (parent == null) {
|
||||
// Apply all post-commit functions, this is the outter-most UnitOfWork.
|
||||
traverser.postOrderTraversal(this).forEach(uow -> {
|
||||
uow.applyPostCommitFunctions();
|
||||
});
|
||||
|
||||
// Merge our cache into the session cache.
|
||||
session.mergeCache(cache);
|
||||
|
||||
return new PostCommitFunction(this, null);
|
||||
} else {
|
||||
|
||||
// Merge cache and statistics into parent if there is one.
|
||||
parent.mergeCache(cache);
|
||||
if (purpose != null) {
|
||||
parent.nestedPurposes.add(purpose);
|
||||
}
|
||||
parent.cacheHits += cacheHits;
|
||||
parent.cacheMisses += cacheMisses;
|
||||
parent.databaseLookups += databaseLookups;
|
||||
parent.cacheLookupTime += cacheLookupTime;
|
||||
for (String name : databaseTime.keySet()) {
|
||||
if (parent.databaseTime.containsKey(name)) {
|
||||
double t = parent.databaseTime.get(name);
|
||||
parent.databaseTime.put(name, t + databaseTime.get(name));
|
||||
} else {
|
||||
parent.databaseTime.put(name, databaseTime.get(name));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// else {
|
||||
|
@ -203,7 +311,7 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
|
|||
}
|
||||
|
||||
/* Explicitly discard the work and mark it as as such in the log. */
|
||||
public void abort() {
|
||||
public synchronized void abort() {
|
||||
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
|
||||
traverser.postOrderTraversal(this).forEach(uow -> {
|
||||
uow.committed = false;
|
||||
|
@ -211,18 +319,22 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
|
|||
});
|
||||
// log.record(txn::abort)
|
||||
// cache.invalidateSince(txn::start time)
|
||||
if (!hasAborted()) {
|
||||
elapsedTime_.stop();
|
||||
logTimers("aborted");
|
||||
if (LOG.isInfoEnabled()) {
|
||||
if (elapsedTime.isRunning()) {
|
||||
elapsedTime.stop();
|
||||
}
|
||||
LOG.info(logTimers("aborted"));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeCache(Table<String, String, Object> from) {
|
||||
Table<String, String, Object> to = this.cache;
|
||||
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
|
||||
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
|
||||
from.rowMap().forEach((rowKey, columnMap) -> {
|
||||
columnMap.forEach((columnKey, value) -> {
|
||||
if (to.contains(rowKey, columnKey)) {
|
||||
to.put(rowKey, columnKey, CacheUtil.merge(to.get(rowKey, columnKey), from.get(rowKey, columnKey)));
|
||||
// TODO(gburd):...
|
||||
to.put(rowKey, columnKey, Either.left(CacheUtil.merge(to.get(rowKey, columnKey).getLeft(),
|
||||
from.get(rowKey, columnKey).getLeft())));
|
||||
} else {
|
||||
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
|
||||
}
|
||||
|
|
|
@@ -1,3 +1,19 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core;

public class ConflictingUnitOfWorkException extends Exception {
@@ -23,39 +23,38 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Table;

import brave.Tracer;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.SessionCache;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.operation.*;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.*;
import net.helenus.support.DslPropertyException;
import net.helenus.support.Fun;
import net.helenus.support.*;
import net.helenus.support.Fun.Tuple1;
import net.helenus.support.Fun.Tuple2;
import net.helenus.support.Fun.Tuple6;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;

public final class HelenusSession extends AbstractSessionOperations implements Closeable {

private final int MAX_CACHE_SIZE = 10000;
private final int MAX_CACHE_EXPIRE_SECONDS = 600;
public static final Object deleted = new Object();
private static final Logger LOG = LoggerFactory.getLogger(HelenusSession.class);

private final Session session;
private final CodecRegistry registry;
@@ -68,7 +67,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
private final SessionRepository sessionRepository;
private final Executor executor;
private final boolean dropSchemaOnClose;
private final Cache sessionCache;
private final SessionCache<String, Object> sessionCache;
private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer;
private final Metadata metadata;

@@ -78,7 +77,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, boolean defaultQueryIdempotency,
Class<? extends UnitOfWork> unitOfWorkClass, MetricRegistry metricRegistry, Tracer tracer) {
Class<? extends UnitOfWork> unitOfWorkClass, SessionCache sessionCache, MetricRegistry metricRegistry,
Tracer tracer) {
this.session = session;
this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
this.usingKeyspace = Objects.requireNonNull(usingKeyspace,

@@ -94,8 +94,11 @@ public final class HelenusSession extends AbstractSessionOperations implements C
this.metricRegistry = metricRegistry;
this.zipkinTracer = tracer;

this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
if (sessionCache == null) {
this.sessionCache = SessionCache.<String, Object>defaultCache();
} else {
this.sessionCache = sessionCache;
}

this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
@@ -184,7 +187,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
Object result = null;
for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination);
result = sessionCache.getIfPresent(cacheKey);
result = sessionCache.get(cacheKey);
if (result != null) {
return result;
}

@@ -192,6 +195,16 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return null;
}

@Override
public void cacheEvict(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination);
sessionCache.invalidate(cacheKey);
}
}

@Override
public void updateCache(Object pojo, List<Facet> facets) {
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
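Session-cache keys are the schema name plus one flattened facet combination, so eviction simply invalidates every combination for the given facets. A sketch of the key shape, reusing the helpers called above (the concrete values are illustrative):

String tableName = CacheUtil.schemaName(facets);                     // e.g. "user"
for (String[] combination : CacheUtil.flattenFacets(facets)) {
String cacheKey = tableName + "." + Arrays.toString(combination);    // e.g. "user.[id==42]"
sessionCache.invalidate(cacheKey);
}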
@ -200,14 +213,18 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
for (HelenusProperty prop : unboundFacet.getProperties()) {
|
||||
Object value;
|
||||
if (valueMap == null) {
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
if (value != null) {
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
}
|
||||
} else {
|
||||
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
|
||||
value = valueMap.get(prop.getPropertyName());
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
}
|
||||
});
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
|
@ -217,28 +234,14 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
}
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
|
||||
Object value = sessionCache.getIfPresent(pojo);
|
||||
Object mergedValue = null;
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
if (value == null) {
|
||||
sessionCache.put(cacheKey, pojo);
|
||||
} else {
|
||||
if (mergedValue == null) {
|
||||
mergedValue = pojo;
|
||||
} else {
|
||||
mergedValue = CacheUtil.merge(value, pojo);
|
||||
}
|
||||
sessionCache.put(mergedValue, pojo);
|
||||
}
|
||||
}
|
||||
|
||||
mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mergeCache(Table<String, String, Object> uowCache) {
|
||||
List<Object> pojos = uowCache.values().stream().distinct().collect(Collectors.toList());
|
||||
for (Object pojo : pojos) {
|
||||
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
|
||||
List<Object> items = uowCache.values().stream().filter(Either::isLeft).map(Either::getLeft).distinct()
|
||||
.collect(Collectors.toList());
|
||||
for (Object pojo : items) {
|
||||
HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
|
||||
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
if (entity.isCacheable()) {
|
||||
|
@ -249,7 +252,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
if (valueMap == null) {
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
} else {
|
||||
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
|
||||
|
@ -262,24 +265,39 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
String tableName = entity.getName().toCql();
|
||||
// NOTE: should equal `String tableName = CacheUtil.schemaName(facets);`
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
|
||||
Object value = sessionCache.getIfPresent(pojo);
|
||||
Object mergedValue = null;
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
if (value == null) {
|
||||
sessionCache.put(cacheKey, pojo);
|
||||
} else {
|
||||
if (mergedValue == null) {
|
||||
mergedValue = pojo;
|
||||
} else {
|
||||
mergedValue = CacheUtil.merge(value, pojo);
|
||||
}
|
||||
sessionCache.put(mergedValue, pojo);
|
||||
}
|
||||
String tableName = CacheUtil.schemaName(boundFacets);
|
||||
mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
|
||||
}
|
||||
}
|
||||
|
||||
List<List<Facet>> deletedFacetSets = uowCache.values().stream().filter(Either::isRight).map(Either::getRight)
|
||||
.collect(Collectors.toList());
|
||||
for (List<Facet> facets : deletedFacetSets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> combinations = CacheUtil.flattenFacets(facets);
|
||||
for (String[] combination : combinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
sessionCache.invalidate(cacheKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeAndUpdateCacheValues(Object pojo, String tableName, List<String[]> facetCombinations) {
|
||||
Object merged = null;
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
Object value = sessionCache.get(cacheKey);
|
||||
if (value == null) {
|
||||
sessionCache.put(cacheKey, pojo);
|
||||
} else {
|
||||
if (merged == null) {
|
||||
merged = pojo;
|
||||
} else {
|
||||
merged = CacheUtil.merge(value, pojo);
|
||||
}
|
||||
sessionCache.put(cacheKey, merged);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -288,8 +306,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
return metadata;
|
||||
}
|
||||
|
||||
public synchronized UnitOfWork begin() {
|
||||
return begin(null);
|
||||
public UnitOfWork begin() {
|
||||
return this.begin(null);
|
||||
}
|
||||
|
||||
public synchronized UnitOfWork begin(UnitOfWork parent) {
|
||||
|
@ -297,6 +315,20 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
Class<? extends UnitOfWork> clazz = unitOfWorkClass;
|
||||
Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
|
||||
UnitOfWork uow = ctor.newInstance(this, parent);
|
||||
if (LOG.isInfoEnabled() && uow.getPurpose() == null) {
|
||||
StringBuilder purpose = null;
|
||||
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
|
||||
int frame = 2;
|
||||
if (trace[2].getMethodName().equals("begin")) {
|
||||
frame = 3;
|
||||
} else if (trace[2].getClassName().equals(unitOfWorkClass.getName())) {
|
||||
frame = 3;
|
||||
}
|
||||
purpose = new StringBuilder().append(trace[frame].getClassName()).append(".")
|
||||
.append(trace[frame].getMethodName()).append("(").append(trace[frame].getFileName()).append(":")
|
||||
.append(trace[frame].getLineNumber()).append(")");
|
||||
uow.setPurpose(purpose.toString());
|
||||
}
|
||||
if (parent != null) {
|
||||
parent.addNestedUnitOfWork(uow);
|
||||
}
|
||||
|
@ -470,6 +502,14 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
return new UpdateOperation<ResultSet>(this);
|
||||
}
|
||||
|
||||
public <E> UpdateOperation<E> update(Object pojo) {
|
||||
if (pojo instanceof MapExportable == false) {
|
||||
throw new HelenusMappingException(
|
||||
"update of objects that don't implement MapExportable is not yet supported");
|
||||
}
|
||||
return new UpdateOperation<E>(this, pojo);
|
||||
}
|
||||
|
||||
public <E> UpdateOperation<E> update(Drafted<E> drafted) {
|
||||
if (drafted instanceof AbstractEntityDraft == false) {
|
||||
throw new HelenusMappingException(
|
||||
|
|
|
@@ -27,6 +27,7 @@ import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;

import brave.Tracer;
import net.helenus.core.cache.SessionCache;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;

@@ -56,6 +57,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
private boolean dropUnusedIndexes = false;
private KeyspaceMetadata keyspaceMetadata;
private AutoDdl autoDdl = AutoDdl.UPDATE;
private SessionCache sessionCache = null;

SessionInitializer(Session session) {
this.session = Objects.requireNonNull(session, "empty session");

@@ -123,6 +125,11 @@ public final class SessionInitializer extends AbstractSessionOperations {
return this;
}

public SessionInitializer setSessionCache(SessionCache sessionCache) {
this.sessionCache = sessionCache;
return this;
}

public ConsistencyLevel getDefaultConsistencyLevel() {
return consistencyLevel;
}

@@ -243,8 +250,8 @@ public final class SessionInitializer extends AbstractSessionOperations {
public synchronized HelenusSession get() {
initialize();
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
zipkinTracer);
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, sessionCache,
metricRegistry, zipkinTracer);
}

private void initialize() {
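setSessionCache lets an application swap the built-in Guava-backed cache for its own SessionCache before the session is built; when left unset, HelenusSession falls back to SessionCache.defaultCache(). A sketch of the wiring, assuming the usual Helenus.init(...) entry point returns this SessionInitializer:

HelenusSession session = Helenus.init(cassandraSession)
.setSessionCache(SessionCache.<String, Object>defaultCache()) // or any custom SessionCache implementation
.get();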
|
@@ -25,8 +25,8 @@ import net.helenus.core.cache.Facet;
public interface UnitOfWork<X extends Exception> extends AutoCloseable {

/**
 * Marks the beginning of a transactional section of work. Will write a record
 * to the shared write-ahead log.
 * Marks the beginning of a transactional section of work. Will write a
 * recordCacheAndDatabaseOperationCount to the shared write-ahead log.
 *
 * @return the handle used to commit or abort the work.
 */

@@ -59,10 +59,15 @@ public interface UnitOfWork<X extends Exception> extends AutoCloseable {

void cacheUpdate(Object pojo, List<Facet> facets);

List<Facet> cacheEvict(List<Facet> facets);

String getPurpose();
UnitOfWork setPurpose(String purpose);

Stopwatch getExecutionTimer();
void addDatabaseTime(String name, Stopwatch amount);
void addCacheLookupTime(Stopwatch amount);

Stopwatch getCacheLookupTimer();
// Cache > 0 means "cache hit", < 0 means cache miss.
void recordCacheAndDatabaseOperationCount(int cache, int database);

}
@@ -1,9 +1,26 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.concurrent.TimeoutException;

import net.helenus.core.ConflictingUnitOfWorkException;

@@ -11,7 +28,7 @@ import net.helenus.core.ConflictingUnitOfWorkException;
@Target(ElementType.METHOD)
public @interface Retry {

Class<? extends Exception>[] on() default ConflictingUnitOfWorkException.class;
Class<? extends Exception>[] on() default {ConflictingUnitOfWorkException.class, TimeoutException.class};

int times() default 3;
}
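With TimeoutException added to the default on() list, a method that drives a unit of work can be retried when a query times out as well as on conflict. A sketch of typical usage; the service method and its body are illustrative, only the annotation defaults come from this diff:

@Retry(times = 3)
public void refreshAccount(HelenusSession session) throws Exception {
UnitOfWork uow = session.begin();
try {
// ... operations that may throw TimeoutException or ConflictingUnitOfWorkException ...
uow.commit();
} catch (Exception e) {
uow.abort();
throw e; // RetryAspect re-invokes the method for exceptions matching on()
}
}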
@@ -1,3 +1,19 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.aspect;

import java.lang.reflect.Method;

@@ -73,7 +89,7 @@ public class RetryAspect {
return retryAnnotation;
}

Class[] argClasses = new Class[pjp.getArgs().length];
Class<?>[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass();
}
@@ -15,6 +15,7 @@
*/
package net.helenus.core.cache;

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

@@ -23,7 +24,13 @@ import net.helenus.mapping.HelenusProperty;
public class BoundFacet extends Facet<String> {
private final Map<HelenusProperty, Object> properties;

BoundFacet(String name, Map<HelenusProperty, Object> properties) {
public BoundFacet(HelenusProperty property, Object value) {
super(property.getPropertyName(), value == null ? null : value.toString());
this.properties = new HashMap<HelenusProperty, Object>(1);
this.properties.put(property, value);
}

public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name,
(properties.keySet().size() > 1)
? "[" + String.join(", ",
@@ -38,7 +38,22 @@ public class CacheUtil {
}

public static Object merge(Object to, Object from) {
return to; // TODO(gburd): yeah...
if (to == from) {
return to;
} else {
return from;
}
/*
 * // TODO(gburd): take ttl and writeTime into account when merging. Map<String,
 * Object> toValueMap = to instanceof MapExportable ? ((MapExportable)
 * to).toMap() : null; Map<String, Object> fromValueMap = to instanceof
 * MapExportable ? ((MapExportable) from).toMap() : null;
 *
 * if (toValueMap != null && fromValueMap != null) { for (String key :
 * fromValueMap.keySet()) { if (toValueMap.containsKey(key) &&
 * toValueMap.get(key) != fromValueMap.get(key)) { toValueMap.put(key,
 * fromValueMap.get(key)); } } } return to;
 */
}

public static String schemaName(List<Facet> facets) {
src/main/java/net/helenus/core/cache/GuavaCache.java (vendored, new file, 44 lines)

@@ -0,0 +1,44 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.cache;

import com.google.common.cache.Cache;

public class GuavaCache<K, V> implements SessionCache<K, V> {

final Cache<K, V> cache;

GuavaCache(Cache<K, V> cache) {
this.cache = cache;
}

@Override
public void invalidate(K key) {
cache.invalidate(key);
}

@Override
public V get(K key) {
return cache.getIfPresent(key);
}

@Override
public void put(K key, V value) {
cache.put(key, value);
}

}
src/main/java/net/helenus/core/cache/SessionCache.java (vendored, new file, 36 lines)

@@ -0,0 +1,36 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.cache;

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;

public interface SessionCache<K, V> {

static <K, V> SessionCache<K, V> defaultCache() {
int MAX_CACHE_SIZE = 10000;
int MAX_CACHE_EXPIRE_SECONDS = 600;
return new GuavaCache<K, V>(CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS)
.expireAfterWrite(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build());
}

void invalidate(K key);
V get(K key);
void put(K key, V value);
}
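SessionCache is deliberately small (invalidate/get/put), so adapters over other caching libraries are easy to write. A purely illustrative map-backed implementation:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class MapSessionCache<K, V> implements SessionCache<K, V> {

private final ConcurrentMap<K, V> map = new ConcurrentHashMap<>();

@Override
public void invalidate(K key) { map.remove(key); }

@Override
public V get(K key) { return map.get(key); }

@Override
public void put(K key, V value) { map.put(key, value); }
}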
|
@@ -15,10 +15,12 @@
*/
package net.helenus.core.operation;

import java.util.LinkedList;
import java.util.List;
import java.util.*;

import net.helenus.core.*;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.HelenusProperty;

public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends

@@ -107,4 +109,39 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
}
ifFilters.add(filter);
}

protected List<Facet> bindFacetValues(List<Facet> facets) {
if (facets == null) {
return new ArrayList<Facet>();
}
List<Facet> boundFacets = new ArrayList<>();
Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size());
filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f));

for (Facet facet : facets) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
if (filters != null) {
for (HelenusProperty prop : unboundFacet.getProperties()) {

Filter filter = filterMap.get(prop);
if (filter != null) {
Object[] postulates = filter.postulateValues();
for (Object p : postulates) {
binder.setValueForProperty(prop, p.toString());
}
}
}

}
if (binder.isBound()) {
boundFacets.add(binder.bind());
}
} else {
boundFacets.add(facet);
}
}
return boundFacets;
}
}
@@ -16,6 +16,8 @@
package net.helenus.core.operation;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;

@@ -31,15 +33,11 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex

public abstract E transform(ResultSet resultSet);

public boolean cacheable() {
return false;
}

public PreparedOperation<E> prepare() {
return new PreparedOperation<E>(prepareStatement(), this);
}

public E sync() {// throws TimeoutException {
public E sync() throws TimeoutException {
final Timer.Context context = requestLatency.time();
try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,

@@ -50,7 +48,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
}
}

public E sync(UnitOfWork uow) {// throws TimeoutException {
public E sync(UnitOfWork uow) throws TimeoutException {
if (uow == null)
return sync();

@@ -67,11 +65,11 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex

public CompletableFuture<E> async() {
return CompletableFuture.<E>supplyAsync(() -> {
// try {
return sync();
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
try {
return sync();
} catch (TimeoutException ex) {
throw new CompletionException(ex);
}
});
}

@@ -79,11 +77,11 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
if (uow == null)
return async();
return CompletableFuture.<E>supplyAsync(() -> {
// try {
return sync();
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
try {
return sync();
} catch (TimeoutException ex) {
throw new CompletionException(ex);
}
});
}
}
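sync() and sync(UnitOfWork) now declare TimeoutException instead of commenting it out, so callers either handle it directly or go through async(), which re-throws it wrapped in a CompletionException. A sketch, where op stands for any concrete operation built from the session:

try {
op.sync();
} catch (TimeoutException e) {
// retry, surface, or fall back as appropriate
}
// or take the async path, where the timeout arrives wrapped in a CompletionException:
op.async().whenComplete((value, ex) -> {
if (ex != null && ex.getCause() instanceof TimeoutException) { /* handle the timeout */ }
});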
@@ -15,9 +15,13 @@
*/
package net.helenus.core.operation;

import static net.helenus.core.HelenusSession.deleted;

import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;

@@ -57,20 +61,25 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
});
}

public Optional<E> sync() {// throws TimeoutException {
public Optional<E> sync() throws TimeoutException {
final Timer.Context context = requestLatency.time();
try {
Optional<E> result = Optional.empty();
E cacheResult = null;
boolean updateCache = isSessionCacheable();
boolean updateCache = isSessionCacheable() && checkCache;

if (enableCache && isSessionCacheable()) {
if (checkCache && isSessionCacheable()) {
List<Facet> facets = bindFacetValues();
String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) {
result = Optional.of(cacheResult);
updateCache = false;
sessionCacheHits.mark();
cacheHits.mark();
} else {
sessionCacheMiss.mark();
cacheMiss.mark();
}
}
@ -95,7 +104,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
}
|
||||
}
|
||||
|
||||
public Optional<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
|
||||
public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException {
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
|
@ -103,30 +112,59 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
try {
|
||||
|
||||
Optional<E> result = Optional.empty();
|
||||
E cacheResult = null;
|
||||
boolean updateCache = true;
|
||||
E cachedResult = null;
|
||||
final boolean updateCache;
|
||||
|
||||
if (enableCache) {
|
||||
Stopwatch timer = uow.getCacheLookupTimer();
|
||||
timer.start();
|
||||
List<Facet> facets = bindFacetValues();
|
||||
cacheResult = checkCache(uow, facets);
|
||||
if (cacheResult != null) {
|
||||
result = Optional.of(cacheResult);
|
||||
updateCache = false;
|
||||
} else {
|
||||
if (isSessionCacheable()) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cacheResult != null) {
|
||||
result = Optional.of(cacheResult);
|
||||
if (checkCache) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null) {
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
updateCache = false;
|
||||
result = Optional.of(cachedResult);
|
||||
uowCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
uowCacheMiss.mark();
|
||||
if (isSessionCacheable()) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cachedResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cachedResult != null) {
|
||||
result = Optional.of(cachedResult);
|
||||
sessionCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
} finally {
|
||||
timer.stop();
|
||||
uow.addCacheLookupTime(timer);
|
||||
}
|
||||
timer.stop();
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
|
||||
if (!result.isPresent()) {
|
||||
// Check to see if we fetched the object from the cache
|
||||
if (result.isPresent()) {
|
||||
// If we fetched the `deleted` object then the result is null (really
|
||||
// Optional.empty()).
|
||||
if (result.get() == deleted) {
|
||||
result = Optional.empty();
|
||||
}
|
||||
} else {
|
||||
|
||||
// Formulate the query and execute it against the Cassandra cluster.
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
|
@ -136,10 +174,9 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
}
|
||||
|
||||
// If we have a result, it wasn't from the UOW cache, and we're caching things
|
||||
// then we
|
||||
// need to put this result into the cache for future requests to find.
|
||||
if (updateCache && result.isPresent()) {
|
||||
updateCache(uow, result.get(), getFacets());
|
||||
// then we need to put this result into the cache for future requests to find.
|
||||
if (updateCache && result.isPresent() && result.get() != deleted) {
|
||||
cacheUpdate(uow, result.get(), getFacets());
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@ -150,11 +187,11 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
|
||||
public CompletableFuture<Optional<E>> async() {
|
||||
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -162,11 +199,11 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -21,9 +21,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;

@@ -42,14 +39,13 @@ import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException;

public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {

private static final Logger LOG = LoggerFactory.getLogger(AbstractStatementOperation.class);

protected boolean enableCache = true;
protected boolean checkCache = true;
protected boolean showValues = true;
protected TraceContext traceContext;
long queryExecutionTimeout = 10;

@@ -70,13 +66,13 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO

public abstract Statement buildStatement(boolean cached);

public O ignoreCache(boolean enabled) {
enableCache = enabled;
public O uncached(boolean enabled) {
checkCache = enabled;
return (O) this;
}

public O ignoreCache() {
enableCache = true;
public O uncached() {
checkCache = false;
return (O) this;
}
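ignoreCache() is renamed to uncached(), and the flag it sets is now checkCache, which the sync() paths consult before touching the unit-of-work or session caches. A sketch of opting a single read out of caching; the builder that produces op is assumed, only uncached() comes from this diff:

op.uncached()  // checkCache = false: skip cache lookups for this call
.sync(uow);    // still metered, but always goes to Cassandra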
@@ -326,21 +322,14 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
if (!facets.isEmpty()) {
optionalCachedResult = uow.cacheLookup(facets);
if (optionalCachedResult.isPresent()) {
uowCacheHits.mark();
LOG.info("UnitOfWork({}) cache hit using facets", uow.hashCode());
result = (E) optionalCachedResult.get();
}
}

if (result == null) {
uowCacheMiss.mark();
LOG.info("UnitOfWork({}) cache miss", uow.hashCode());
}

return result;
}

protected void updateCache(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
List<Facet> facets = new ArrayList<>();
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;

@@ -348,15 +337,23 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
for (HelenusProperty prop : unboundFacet.getProperties()) {
Object value;
if (valueMap == null) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
binder.setValueForProperty(prop, value.toString());
value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
if (value != null) {
binder.setValueForProperty(prop, value.toString());
}
} else {
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
value = valueMap.get(prop.getPropertyName());
if (value != null) {
binder.setValueForProperty(prop, value.toString());
}
}
}
if (binder.isBound()) {
facets.add(binder.bind());
});
}
} else {
facets.add(facet);
}
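The cacheUpdate() hunk above trades the forEach lambda for a plain loop and only calls value.toString() after a null check, so a missing property value no longer breaks facet binding. A rough standalone illustration of that bind-only-non-null approach; the Binder class here is a simplified stand-in for UnboundFacet.Binder, not the actual Helenus class:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NullSafeBinder {

    // Simplified stand-in for UnboundFacet.Binder: collects property -> value strings.
    static class Binder {
        private final List<String> required;
        private final Map<String, String> bound = new LinkedHashMap<>();

        Binder(List<String> required) { this.required = required; }

        void setValueForProperty(String prop, String value) { bound.put(prop, value); }

        boolean isBound() { return bound.keySet().containsAll(required); }

        Map<String, String> bind() { return bound; }
    }

    public static void main(String[] args) {
        Map<String, Object> valueMap = new LinkedHashMap<>();
        valueMap.put("id", 42);
        valueMap.put("name", null); // a null property value must not be bound

        Binder binder = new Binder(Arrays.asList("id", "name"));
        for (Map.Entry<String, Object> e : valueMap.entrySet()) {
            Object value = e.getValue();
            if (value != null) { // guard before toString(), as in the patch
                binder.setValueForProperty(e.getKey(), value.toString());
            }
        }
        System.out.println(binder.isBound() ? binder.bind() : "facet left unbound");
    }
}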
@ -15,9 +15,13 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import static net.helenus.core.HelenusSession.deleted;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
|
@ -58,20 +62,25 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
});
|
||||
}
|
||||
|
||||
public Stream<E> sync() {// throws TimeoutException {
|
||||
public Stream<E> sync() throws TimeoutException {
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
Stream<E> resultStream = null;
|
||||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable();
|
||||
|
||||
if (enableCache && isSessionCacheable()) {
|
||||
if (checkCache && isSessionCacheable()) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cacheResult != null) {
|
||||
resultStream = Stream.of(cacheResult);
|
||||
updateCache = false;
|
||||
sessionCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
} else {
|
||||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -102,7 +111,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
}
|
||||
}
|
||||
|
||||
public Stream<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
|
||||
public Stream<E> sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
|
@ -110,20 +119,50 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
try {
|
||||
Stream<E> resultStream = null;
|
||||
E cachedResult = null;
|
||||
boolean updateCache = true;
|
||||
final boolean updateCache;
|
||||
|
||||
if (enableCache) {
|
||||
Stopwatch timer = uow.getCacheLookupTimer();
|
||||
timer.start();
|
||||
List<Facet> facets = bindFacetValues();
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
resultStream = Stream.of(cachedResult);
|
||||
updateCache = false;
|
||||
if (checkCache) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null) {
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
updateCache = false;
|
||||
resultStream = Stream.of(cachedResult);
|
||||
uowCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
uowCacheMiss.mark();
|
||||
if (isSessionCacheable()) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cachedResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cachedResult != null) {
|
||||
resultStream = Stream.of(cachedResult);
|
||||
sessionCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
} finally {
|
||||
timer.stop();
|
||||
uow.addCacheLookupTime(timer);
|
||||
}
|
||||
timer.stop();
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
|
||||
// Check to see if we fetched the object from the cache
|
||||
if (resultStream == null) {
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
|
@ -132,12 +171,16 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
|
||||
// If we have a result and we're caching then we need to put it into the cache
|
||||
// for future requests to find.
|
||||
if (updateCache && resultStream != null) {
|
||||
if (resultStream != null) {
|
||||
List<E> again = new ArrayList<>();
|
||||
List<Facet> facets = getFacets();
|
||||
resultStream.forEach(result -> {
|
||||
updateCache(uow, result, facets);
|
||||
again.add(result);
|
||||
if (result != deleted) {
|
||||
if (updateCache) {
|
||||
cacheUpdate(uow, result, facets);
|
||||
}
|
||||
again.add(result);
|
||||
}
|
||||
});
|
||||
resultStream = again.stream();
|
||||
}
|
||||
|
@ -150,23 +193,23 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
|
||||
public CompletableFuture<Stream<E>> async() {
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async(UnitOfWork<?> uow) {
|
||||
public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
|
||||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,6 +15,9 @@
*/
package net.helenus.core.operation;

import java.util.List;
import java.util.concurrent.TimeoutException;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;

@@ -23,6 +26,8 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusMappingException;

@@ -122,4 +127,33 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}

public List<Facet> bindFacetValues() {
return bindFacetValues(getFacets());
}

@Override
public ResultSet sync() throws TimeoutException {
ResultSet result = super.sync();
if (entity.isCacheable()) {
sessionOps.cacheEvict(bindFacetValues());
}
return result;
}

@Override
public ResultSet sync(UnitOfWork uow) throws TimeoutException {
if (uow == null) {
return sync();
}
ResultSet result = super.sync(uow);
uow.cacheEvict(bindFacetValues());
return result;
}

@Override
public List<Facet> getFacets() {
return entity.getFacets();
}

}
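DeleteOperation now evicts cache entries only after the delete statement has succeeded, in both the session-level and unit-of-work paths. A compact sketch of that delete-then-evict ordering against a plain in-memory cache; the map-backed store and key format are placeholders, not the Helenus cache:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DeleteThenEvict {

    private final Map<String, String> cache = new ConcurrentHashMap<>();
    private final Map<String, String> table = new ConcurrentHashMap<>();

    public void delete(String key) {
        table.remove(key); // 1) perform the delete against the store
        cache.remove(key); // 2) only then drop the now-stale cached value
    }

    public static void main(String[] args) {
        DeleteThenEvict ops = new DeleteThenEvict();
        ops.table.put("widget:1", "row");
        ops.cache.put("widget:1", "row");
        ops.delete("widget:1");
        System.out.println(ops.cache.containsKey("widget:1")); // false
    }
}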
@ -16,6 +16,7 @@
|
|||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
|
@ -27,6 +28,7 @@ import net.helenus.core.AbstractSessionOperations;
|
|||
import net.helenus.core.Getter;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
|
@ -235,15 +237,38 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
}
|
||||
|
||||
@Override
|
||||
public T sync(UnitOfWork uow) {// throws TimeoutException {
|
||||
public T sync() throws TimeoutException {
|
||||
T result = super.sync();
|
||||
if (entity.isCacheable() && result != null) {
|
||||
sessionOps.updateCache(result, entity.getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
return sync();
|
||||
}
|
||||
T result = super.sync(uow);
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
updateCache(uow, result, entity.getFacets());
|
||||
cacheUpdate(uow, result, entity.getFacets());
|
||||
} else {
|
||||
if (entity.isCacheable()) {
|
||||
sessionOps.cacheEvict(bindFacetValues());
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
if (entity != null) {
|
||||
return entity.getFacets();
|
||||
} else {
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -15,15 +15,22 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.codahale.metrics.Meter;
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.RegularStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.ResultSetFuture;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.google.common.base.Stopwatch;
|
||||
|
||||
import brave.Span;
|
||||
|
@ -35,21 +42,54 @@ import net.helenus.core.cache.Facet;
|
|||
|
||||
public abstract class Operation<E> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(Operation.class);
|
||||
|
||||
protected final AbstractSessionOperations sessionOps;
|
||||
protected final Meter uowCacheHits;
|
||||
protected final Meter uowCacheMiss;
|
||||
protected final Meter sessionCacheHits;
|
||||
protected final Meter sessionCacheMiss;
|
||||
protected final Meter cacheHits;
|
||||
protected final Meter cacheMiss;
|
||||
protected final Timer requestLatency;
|
||||
|
||||
Operation(AbstractSessionOperations sessionOperations) {
|
||||
this.sessionOps = sessionOperations;
|
||||
MetricRegistry metrics = sessionOperations.getMetricRegistry();
|
||||
if (metrics == null) {
|
||||
metrics = new MetricRegistry();
|
||||
}
|
||||
this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
|
||||
this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
|
||||
this.sessionCacheHits = metrics.meter("net.helenus.session-cache-hits");
|
||||
this.sessionCacheMiss = metrics.meter("net.helenus.session-cache-miss");
|
||||
this.cacheHits = metrics.meter("net.helenus.cache-hits");
|
||||
this.cacheMiss = metrics.meter("net.helenus.cache-miss");
|
||||
this.requestLatency = metrics.timer("net.helenus.request-latency");
|
||||
}
|
||||
|
||||
public static String queryString(Statement statement, boolean includeValues) {
|
||||
String query = null;
|
||||
if (statement instanceof BuiltStatement) {
|
||||
BuiltStatement builtStatement = (BuiltStatement) statement;
|
||||
if (includeValues) {
|
||||
RegularStatement regularStatement = builtStatement.setForceNoValues(true);
|
||||
query = regularStatement.getQueryString();
|
||||
} else {
|
||||
query = builtStatement.getQueryString();
|
||||
}
|
||||
} else if (statement instanceof RegularStatement) {
|
||||
RegularStatement regularStatement = (RegularStatement) statement;
|
||||
query = regularStatement.getQueryString();
|
||||
} else {
|
||||
query = statement.toString();
|
||||
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
|
||||
TimeUnit units, boolean showValues, boolean cached) { // throws TimeoutException {
|
||||
TimeUnit units, boolean showValues, boolean cached) throws TimeoutException {
|
||||
|
||||
// Start recording in a Zipkin sub-span our execution time to perform this
|
||||
// operation.
|
||||
|
@@ -67,18 +107,20 @@ public abstract class Operation<E> {
}

Statement statement = options(buildStatement(cached));
Stopwatch timer = null;
if (uow != null) {
timer = uow.getExecutionTimer();
timer.start();
}
ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
ResultSet resultSet = futureResultSet.getUninterruptibly(); // TODO(gburd): (timeout, units);
Stopwatch timer = Stopwatch.createStarted();
try {
ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues);
if (uow != null)
uow.recordCacheAndDatabaseOperationCount(0, 1);
ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
return resultSet;

if (uow != null)
} finally {
timer.stop();

return resultSet;
if (uow != null)
uow.addDatabaseTime("Cassandra", timer);
log(statement, uow, timer, showValues);
}

} finally {
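The execute() hunk above moves to a Stopwatch that is always started before the query and stopped in a finally block, with getUninterruptibly(timeout, units) now enforcing the query timeout instead of waiting forever. The same start/await/stop-in-finally shape, reduced to a standard Future plus Guava's Stopwatch; the executor and the dummy task are placeholders:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import com.google.common.base.Stopwatch;

public class TimedExecute {

    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Stopwatch timer = Stopwatch.createStarted(); // started unconditionally, as in the patch
        try {
            Future<String> future = pool.submit(() -> "result-set");
            String result = future.get(10, TimeUnit.SECONDS); // bounded wait instead of blocking forever
            System.out.println(result);
        } catch (TimeoutException ex) {
            System.err.println("query timed out after " + timer);
            throw ex;
        } finally {
            timer.stop(); // elapsed time is recorded even on failure
            System.out.println("took " + timer);
            pool.shutdownNow();
        }
    }
}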
@ -88,6 +130,20 @@ public abstract class Operation<E> {
|
|||
}
|
||||
}
|
||||
|
||||
void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
|
||||
if (LOG.isInfoEnabled()) {
|
||||
String uowString = "";
|
||||
if (uow != null) {
|
||||
uowString = "UOW(" + uow.hashCode() + ")";
|
||||
}
|
||||
String timerString = "";
|
||||
if (timer != null) {
|
||||
timerString = String.format(" %s ", timer.toString());
|
||||
}
|
||||
LOG.info(String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false)));
|
||||
}
|
||||
}
|
||||
|
||||
public Statement options(Statement statement) {
|
||||
return statement;
|
||||
}
|
||||
|
@ -97,7 +153,7 @@ public abstract class Operation<E> {
|
|||
}
|
||||
|
||||
public List<Facet> getFacets() {
|
||||
return null;
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
|
||||
public List<Facet> bindFacetValues() {
|
||||
|
|
|
@ -38,6 +38,7 @@ import net.helenus.core.cache.Facet;
|
|||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.OrderingDirection;
|
||||
import net.helenus.mapping.value.ColumnValueProvider;
|
||||
|
@ -206,16 +207,18 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
Filter filter = filters.get(prop);
|
||||
if (filter != null) {
|
||||
Object[] postulates = filter.postulateValues();
|
||||
for (Object p : postulates) {
|
||||
binder.setValueForProperty(prop, p.toString());
|
||||
for (HelenusProperty prop : unboundFacet.getProperties()) {
|
||||
if (filters != null) {
|
||||
Filter filter = filters.get(prop);
|
||||
if (filter != null) {
|
||||
Object[] postulates = filter.postulateValues();
|
||||
for (Object p : postulates) {
|
||||
binder.setValueForProperty(prop, p.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
|
@ -247,7 +250,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
|
||||
}
|
||||
|
||||
if (cached) {
|
||||
// TODO(gburd): writeTime and ttl will be useful on merge() but cause object
|
||||
// identity to fail.
|
||||
if (false && cached) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
|
|
|
@ -16,7 +16,9 @@
|
|||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.Assignment;
|
||||
|
@ -25,19 +27,25 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
|
|||
import com.datastax.driver.core.querybuilder.Update;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.BoundFacet;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.value.BeanColumnValueProvider;
|
||||
import net.helenus.mapping.value.ValueProviderMap;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
import net.helenus.support.Immutables;
|
||||
|
||||
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
|
||||
|
||||
private final List<Assignment> assignments = new ArrayList<Assignment>();
|
||||
private final Map<Assignment, BoundFacet> assignments = new HashMap<>();
|
||||
private final AbstractEntityDraft<E> draft;
|
||||
private final Map<String, Object> draftMap;
|
||||
private HelenusEntity entity = null;
|
||||
private Object pojo;
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
|
||||
|
@ -53,13 +61,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
this.draftMap = draft.toMap();
|
||||
}
|
||||
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, Object pojo) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
this.pojo = pojo;
|
||||
this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
|
||||
}
|
||||
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(p.getProperty(), v));
|
||||
|
||||
addPropertyNode(p);
|
||||
}
|
||||
|
@ -68,9 +84,29 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
Objects.requireNonNull(getter, "getter is empty");
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, prop);
|
||||
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(prop, value));
|
||||
|
||||
if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
if (draft.get(key, value.getClass()) != value) {
|
||||
draft.set(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
if (entity != null) {
|
||||
if (entity.isCacheable() && pojo != null && pojo instanceof MapExportable) {
|
||||
String key = prop.getPropertyName();
|
||||
Map<String, Object> map = ((MapExportable) pojo).toMap();
|
||||
if (!(map instanceof ValueProviderMap)) {
|
||||
if (map.get(key) != value) {
|
||||
map.put(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
|
@ -95,15 +131,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value + delta);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -117,15 +158,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value - delta);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -144,16 +190,22 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -165,16 +217,22 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -186,13 +244,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null || draft != null) {
|
||||
List<V> list;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
} else {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
}
|
||||
if (idx < 0) {
|
||||
list.add(0, value);
|
||||
} else if (idx > list.size()) {
|
||||
|
@ -201,8 +262,13 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
list.add(idx, value);
|
||||
}
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -214,15 +280,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -235,15 +306,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.addAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -256,15 +332,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.remove(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -277,15 +358,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
list.removeAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -334,15 +420,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
set.add(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.add(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -355,15 +446,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
set.addAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.addAll(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -376,15 +472,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
set.remove(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.remove(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -397,15 +498,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
set.removeAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.removeAll(value);
|
||||
}
|
||||
assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
@ -453,23 +559,29 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
Map<K, V> map = new HashMap<K, V>(
|
||||
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
map.put(key, value);
|
||||
facet = new BoundFacet(prop, map);
|
||||
} else if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
|
||||
.apply(Immutables.mapOf(key, value));
|
||||
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
|
||||
assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
|
||||
assignments.put(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()), facet);
|
||||
}
|
||||
} else {
|
||||
assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
|
||||
assignments.put(QueryBuilder.put(p.getColumnName(), key, value), facet);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -481,20 +593,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
BoundFacet facet = null;
|
||||
if (pojo != null) {
|
||||
Map<K, V> newMap = new HashMap<K, V>(
|
||||
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
newMap.putAll(map);
|
||||
facet = new BoundFacet(prop, newMap);
|
||||
} else if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Map convertedMap = (Map) converter.get().apply(map);
|
||||
assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
|
||||
assignments.put(QueryBuilder.putAll(p.getColumnName(), convertedMap), facet);
|
||||
} else {
|
||||
assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
|
||||
assignments.put(QueryBuilder.putAll(p.getColumnName(), map), facet);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -507,7 +625,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
Update update = QueryBuilder.update(entity.getName().toCql());
|
||||
|
||||
for (Assignment assignment : assignments) {
|
||||
for (Assignment assignment : assignments.keySet()) {
|
||||
update.with(assignment);
|
||||
}
|
||||
|
||||
|
@ -567,16 +685,48 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
}
|
||||
|
||||
@Override
|
||||
public E sync(UnitOfWork uow) {// throws TimeoutException {
|
||||
public E sync() throws TimeoutException {
|
||||
E result = super.sync();
|
||||
if (entity.isCacheable()) {
|
||||
if (draft != null) {
|
||||
sessionOps.updateCache(draft, bindFacetValues());
|
||||
} else if (pojo != null) {
|
||||
sessionOps.updateCache(pojo, bindFacetValues());
|
||||
} else {
|
||||
sessionOps.cacheEvict(bindFacetValues());
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public E sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
return sync();
|
||||
}
|
||||
E result = super.sync(uow);
|
||||
// TODO(gburd): Only drafted entity objects are updated in the cache at this
|
||||
// time.
|
||||
if (draft != null) {
|
||||
updateCache(uow, result, getFacets());
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
} else if (pojo != null) {
|
||||
cacheUpdate(uow, (E) pojo, bindFacetValues());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
List<Facet> facets = bindFacetValues(entity.getFacets());
|
||||
facets.addAll(assignments.values().stream().distinct().filter(o -> o != null).collect(Collectors.toList()));
|
||||
return facets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
if (entity != null) {
|
||||
return entity.getFacets();
|
||||
} else {
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
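bindFacetValues() in the UpdateOperation hunk above now merges the entity's identifying facets with the facets captured per assignment, de-duplicated and with nulls dropped via a small stream pipeline. The same pipeline shown on plain strings; the facet names below are placeholders:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class MergeFacets {

    public static void main(String[] args) {
        List<String> facets = new ArrayList<>(Arrays.asList("pk:widget/42"));

        // Per-assignment facets; assignments without a bound facet contribute null.
        List<String> assignmentFacets = Arrays.asList("col:name", null, "col:name", "col:price");

        facets.addAll(assignmentFacets.stream()
                .distinct()              // the same facet may back several assignments
                .filter(o -> o != null)  // unbound assignments add nothing
                .collect(Collectors.toList()));

        System.out.println(facets); // [pk:widget/42, col:name, col:price]
    }
}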
|
|
@ -18,6 +18,7 @@ package net.helenus.core.reflect;
|
|||
import java.lang.reflect.InvocationHandler;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
@ -63,31 +64,33 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
|
||||
private HelenusEntity init(Metadata metadata) {
|
||||
HelenusEntity entity = new HelenusMappingEntity(iface, metadata);
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
if (properties != null) {
|
||||
for (HelenusProperty prop : properties) {
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
map.put(prop.getGetterMethod(), prop);
|
||||
|
||||
map.put(prop.getGetterMethod(), prop);
|
||||
AbstractDataType type = prop.getDataType();
|
||||
Class<?> javaType = prop.getJavaType();
|
||||
|
||||
AbstractDataType type = prop.getDataType();
|
||||
Class<?> javaType = prop.getJavaType();
|
||||
|
||||
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
|
||||
|
||||
Object childDsl = Helenus.dsl(javaType, classLoader, Optional.of(new HelenusPropertyNode(prop, parent)),
|
||||
metadata);
|
||||
|
||||
udtMap.put(prop.getGetterMethod(), childDsl);
|
||||
}
|
||||
|
||||
if (type instanceof DTDataType) {
|
||||
DTDataType dataType = (DTDataType) type;
|
||||
|
||||
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
|
||||
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
|
||||
|
||||
Object childDsl = Helenus.dsl(javaType, classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
|
||||
|
||||
tupleMap.put(prop.getGetterMethod(), childDsl);
|
||||
udtMap.put(prop.getGetterMethod(), childDsl);
|
||||
}
|
||||
|
||||
if (type instanceof DTDataType) {
|
||||
DTDataType dataType = (DTDataType) type;
|
||||
|
||||
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
|
||||
|
||||
Object childDsl = Helenus.dsl(javaType, classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
|
||||
|
||||
tupleMap.put(prop.getGetterMethod(), childDsl);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,17 +15,22 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectStreamException;
|
||||
import java.io.Serializable;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationHandler;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.mapping.annotation.Transient;
|
||||
import net.helenus.mapping.value.ValueProviderMap;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
|
||||
|
@ -59,6 +64,13 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
return result;
|
||||
}
|
||||
|
||||
private Object writeReplace() {
|
||||
return new SerializationProxy<E>(this);
|
||||
}
|
||||
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
|
||||
throw new InvalidObjectException("Proxy required.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
|
||||
|
||||
|
@ -97,12 +109,20 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
return iface.getSimpleName() + ": " + src.toString();
|
||||
}
|
||||
|
||||
if ("writeReplace".equals(methodName)) {
|
||||
return new SerializationProxy(this);
|
||||
}
|
||||
|
||||
if ("readObject".equals(methodName)) {
|
||||
throw new InvalidObjectException("Proxy required.");
|
||||
}
|
||||
|
||||
if ("dsl".equals(methodName)) {
|
||||
return Helenus.dsl(iface);
|
||||
}
|
||||
|
||||
if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
|
||||
return Collections.unmodifiableMap(src);
|
||||
return src; // return Collections.unmodifiableMap(src);
|
||||
}
|
||||
|
||||
Object value = src.get(methodName);
|
||||
|
@@ -132,4 +152,30 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab

return value;
}

static class SerializationProxy<E> implements Serializable {

private static final long serialVersionUID = -5617583940055969353L;

private final Class<E> iface;
private final Map<String, Object> src;

public SerializationProxy(MapperInvocationHandler mapper) {
this.iface = mapper.iface;
if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
Set<String> keys = mapper.src.keySet();
for (String key : keys) {
this.src.put(key, mapper.src.get(key));
}
} else {
this.src = mapper.src;
}
}

Object readResolve() throws ObjectStreamException {
return new MapperInvocationHandler(iface, src);
}

}
}
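MapperInvocationHandler now serializes through a SerializationProxy: writeReplace() substitutes the proxy on the way out and readResolve() rebuilds the handler on the way in, so the state travels in a plain map rather than as the dynamic proxy itself. The classic shape of that pattern on a trivial value class; Point here is purely illustrative and unrelated to the Helenus types:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class Point implements Serializable {
    private final int x, y;

    public Point(int x, int y) { this.x = x; this.y = y; }

    // Always emit the proxy instead of this instance.
    private Object writeReplace() { return new SerializationProxy(x, y); }

    // Refuse direct deserialization; only the proxy may come back in.
    private void readObject(ObjectInputStream in) throws InvalidObjectException {
        throw new InvalidObjectException("Proxy required.");
    }

    private static class SerializationProxy implements Serializable {
        private static final long serialVersionUID = 1L;
        private final int x, y;

        SerializationProxy(int x, int y) { this.x = x; this.y = y; }

        // Rebuild the real object through its public constructor.
        private Object readResolve() { return new Point(x, y); }
    }

    @Override
    public String toString() { return "Point(" + x + "," + y + ")"; }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(new Point(1, 2));
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            System.out.println(in.readObject()); // Point(1,2), rebuilt via readResolve()
        }
    }
}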
@ -15,6 +15,7 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -28,7 +29,8 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
|
|||
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
|
||||
|
||||
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
|
||||
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
|
||||
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class, Serializable.class},
|
||||
handler);
|
||||
return proxy;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,8 @@ package net.helenus.mapping;
|
|||
import java.lang.reflect.Method;
|
||||
import java.util.*;
|
||||
|
||||
import javax.validation.ConstraintValidator;
|
||||
|
||||
import org.apache.commons.lang3.ClassUtils;
|
||||
|
||||
import com.datastax.driver.core.DefaultMetadata;
|
||||
|
@ -31,6 +33,7 @@ import net.helenus.core.annotation.Cacheable;
|
|||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.mapping.annotation.*;
|
||||
import net.helenus.mapping.validator.DistinctValidator;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
||||
public final class HelenusMappingEntity implements HelenusEntity {
|
||||
|
@ -125,10 +128,12 @@ public final class HelenusMappingEntity implements HelenusEntity {
|
|||
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
|
||||
primaryKeyProperties = null;
|
||||
}
|
||||
Optional<IdentityName> optionalIndexName = prop.getIndexName();
|
||||
if (optionalIndexName.isPresent()) {
|
||||
UnboundFacet facet = new UnboundFacet(prop);
|
||||
facetsBuilder.add(facet);
|
||||
for (ConstraintValidator<?, ?> constraint : MappingUtil.getValidators(prop.getGetterMethod())) {
|
||||
if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) {
|
||||
UnboundFacet facet = new UnboundFacet(prop);
|
||||
facetsBuilder.add(facet);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -16,6 +16,7 @@
package net.helenus.mapping;

import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

@@ -282,4 +283,41 @@ public final class MappingUtil {
return e.getPropertyNode();
}
}

// https://stackoverflow.com/a/4882306/366692
public static <T> T clone(T object) throws CloneNotSupportedException {
Object clone = null;

// Use reflection, because there is no other way
try {
Method method = object.getClass().getMethod("clone");
clone = method.invoke(object);
} catch (InvocationTargetException e) {
rethrow(e.getCause());
} catch (Exception cause) {
rethrow(cause);
}
if (object.getClass().isInstance(clone)) {
@SuppressWarnings("unchecked") // clone class <= object class <= T
T t = (T) clone;
return t;
} else {
throw new ClassCastException(clone.getClass().getName());
}
}

private static void rethrow(Throwable cause) throws CloneNotSupportedException {
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
}
if (cause instanceof Error) {
throw (Error) cause;
}
if (cause instanceof CloneNotSupportedException) {
throw (CloneNotSupportedException) cause;
}
CloneNotSupportedException e = new CloneNotSupportedException();
e.initCause(cause);
throw e;
}
}
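The new MappingUtil.clone() invokes the object's public clone() reflectively so it can copy values whose concrete type isn't known at compile time. A quick usage-style sketch against java.util.ArrayList, which exposes a public clone(); the helper below mirrors the approach rather than calling the Helenus method itself:

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ReflectiveCloneDemo {

    // Same idea as MappingUtil.clone(): invoke the target's own public clone() via reflection.
    @SuppressWarnings("unchecked")
    static <T> T reflectiveClone(T object) throws Exception {
        Method clone = object.getClass().getMethod("clone");
        return (T) clone.invoke(object);
    }

    public static void main(String[] args) throws Exception {
        ArrayList<String> original = new ArrayList<>(Arrays.asList("a", "b"));
        List<String> copy = reflectiveClone(original);
        copy.add("c");
        System.out.println(original); // [a, b] - the original is untouched
        System.out.println(copy);     // [a, b, c]
    }
}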
@@ -252,4 +252,29 @@ public final class Constraints {
*/
int flags();
}

/**
* Distinct annotation is used to signal, but not ensure, that a value should be
* distinct in the database.
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It does not have effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = DistinctValidator.class)
public @interface Distinct {

/**
* User defined Enum to further restrict the items in the set.
*
* @return the Java Enum type used to further restrict valid values
*/
Class<? extends Enum> value() default Enum.class;

}
}
@@ -65,4 +65,10 @@ public @interface Index {
* @return true if the index should ignore case when comparing
*/
boolean caseSensitive() default true;

/**
* @return true if values in the indexed column are expected to be distinct
*/
boolean distinct() default false;
}
@@ -0,0 +1,35 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.validator;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;

import net.helenus.mapping.annotation.Constraints;

public final class DistinctValidator implements ConstraintValidator<Constraints.Distinct, CharSequence> {

@Override
public void initialize(Constraints.Distinct constraintAnnotation) {
}

@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): if there is an Enum type supplied, check that value is valid
// Enum.name()
return true;
}
}
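isValid() is still a stub; its TODO suggests checking the value against the names of a supplied Enum. One possible shape for that check, written as a plain helper and labeled only as a sketch, not as the project's intended implementation:

public class EnumNameCheck {

    enum Color { RED, GREEN, BLUE }

    // True when the value matches one of the enum constants' names exactly.
    static boolean isValidEnumName(Class<? extends Enum<?>> enumClass, CharSequence value) {
        if (value == null) {
            return true; // nullability is the concern of other constraints
        }
        for (Enum<?> constant : enumClass.getEnumConstants()) {
            if (constant.name().contentEquals(value)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(isValidEnumName(Color.class, "GREEN"));  // true
        System.out.println(isValidEnumName(Color.class, "PURPLE")); // false
    }
}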
@ -15,87 +15,83 @@
|
|||
*/
|
||||
package net.helenus.test.integration.build;
|
||||
|
||||
import com.datastax.driver.core.Cluster;
|
||||
import com.datastax.driver.core.KeyspaceMetadata;
|
||||
import com.datastax.driver.core.Session;
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.cassandra.exceptions.ConfigurationException;
|
||||
import org.apache.thrift.transport.TTransportException;
|
||||
import org.cassandraunit.utils.EmbeddedCassandraServerHelper;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import com.datastax.driver.core.Cluster;
|
||||
import com.datastax.driver.core.KeyspaceMetadata;
import com.datastax.driver.core.Session;

/** AbstractEmbeddedCassandraTest */
public abstract class AbstractEmbeddedCassandraTest {

  private static Cluster cluster;

  private static String keyspace;

  private static Session session;

  private static boolean keep;

  public static boolean isConnected() {
    return session != null;
  }

  public static Cluster getCluster() {
    return cluster;
  }

  public static Session getSession() {
    return session;
  }

  public static String getKeyspace() {
    return keyspace;
  }

  public static void setKeep(boolean enable) {
    keep = enable;
  }

  @BeforeClass
  public static void startCassandraEmbeddedServer()
      throws TTransportException, IOException, InterruptedException, ConfigurationException {
    keyspace = "test" + UUID.randomUUID().toString().replace("-", "");
    EmbeddedCassandraServerHelper.startEmbeddedCassandra(EmbeddedCassandraServerHelper.CASSANDRA_RNDPORT_YML_FILE);

    cluster = Cluster.builder().addContactPoint(EmbeddedCassandraServerHelper.getHost())
        .withPort(EmbeddedCassandraServerHelper.getNativeTransportPort()).build();

    KeyspaceMetadata kmd = cluster.getMetadata().getKeyspace(keyspace);
    if (kmd == null) {
      session = cluster.connect();

      String cql = "CREATE KEYSPACE " + keyspace
          + " WITH replication = {'class': 'SimpleStrategy', 'replication_factor' : 1}"
          + " AND DURABLE_WRITES = false;";
      System.out.println(cql + "\n");
      session.execute(cql);

      cql = "USE " + keyspace + ";";
      System.out.println(cql + "\n");
      session.execute(cql);
    } else {
      session = cluster.connect(keyspace);
    }
  }

  @AfterClass
  public static void after() {
    if (!keep && isConnected()) {
      session.close();
      session = null;
      EmbeddedCassandraServerHelper.cleanEmbeddedCassandra();
    }
  }
}

@ -15,18 +15,19 @@
 */
package net.helenus.test.integration.core;

import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class ContextInitTest extends AbstractEmbeddedCassandraTest {

  @Test
  public void test() {

    HelenusSession session = Helenus.init(getSession()).get();

    System.out.println("Works! " + session);
  }
}

@ -1,5 +1,8 @@
package net.helenus.test.integration.core;

import org.junit.Before;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusValidator;
import net.helenus.mapping.HelenusEntity;

@ -10,43 +13,40 @@ import net.helenus.mapping.annotation.Table;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class HelenusValidatorTest extends AbstractEmbeddedCassandraTest {

  @Table
  interface ModelForValidation {

    @Constraints.Email
    @PartitionKey
    String id();
  }

  HelenusEntity entity;
  HelenusProperty prop;

  @Before
  public void begin() {
    Helenus.init(getSession()).singleton();

    entity = Helenus.entity(ModelForValidation.class);

    prop = entity.getProperty("id");
  }

  @Test(expected = HelenusMappingException.class)
  public void testWrongType() {
    HelenusValidator.INSTANCE.validate(prop, Integer.valueOf(123));
  }

  @Test(expected = HelenusException.class)
  public void testWrongValue() {
    HelenusValidator.INSTANCE.validate(prop, "123");
  }

  public void testOk() {
    HelenusValidator.INSTANCE.validate(prop, "a@b.c");
  }
}

@ -15,407 +15,340 @@
 */
package net.helenus.test.integration.core.collection;

import static net.helenus.core.Query.*;

import java.util.*;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class CollectionTest extends AbstractEmbeddedCassandraTest {

  static Customer customer;

  static HelenusSession session;

  @BeforeClass
  public static void beforeTest() {
    session = Helenus.init(getSession()).showCql().add(Customer.class).autoCreateDrop().get();
    customer = Helenus.dsl(Customer.class, session.getMetadata());
  }

  @Test
  public void testPrint() {
    System.out.println(customer);
  }

  @Test
  public void testSetCRUID() throws TimeoutException {

    UUID id = UUID.randomUUID();

    Set<String> aliases = new HashSet<String>();
    aliases.add("Alex");
    aliases.add("Albert");

    // CREATE

    session.insert().value(customer::id, id).value(customer::aliases, aliases).sync();

    // READ

    // read full object

    Customer actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);
    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(aliases, actual.aliases());
    Assert.assertNull(actual.names());
    Assert.assertNull(actual.properties());

    // read full set

    Set<String> actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst()
        .get()._1;
    Assert.assertEquals(aliases, actualSet);

    // UPDATE

    Set<String> expected = new HashSet<String>();
    expected.add("unknown");

    session.update().set(customer::aliases, expected).where(customer::id, eq(id)).sync();

    actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);

    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(expected, actual.aliases());

    // INSERT

    // add operation

    expected.add("add");
    session.update().add(customer::aliases, "add").where(customer::id, eq(id)).sync();

    actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualSet);

    // addAll operation
    expected.addAll(aliases);
    session.update().addAll(customer::aliases, aliases).where(customer::id, eq(id)).sync();

    actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualSet);

    // DELETE

    // remove single value

    expected.remove("add");
    session.update().remove(customer::aliases, "add").where(customer::id, eq(id)).sync();

    actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualSet);

    // remove values

    expected.removeAll(aliases);
    session.update().removeAll(customer::aliases, aliases).where(customer::id, eq(id)).sync();

    actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualSet);

    // remove full list

    session.update().set(customer::aliases, null).where(customer::id, eq(id)).sync();

    actualSet = session.select(customer::aliases).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertNull(actualSet);

    // remove object

    session.delete().where(customer::id, eq(id)).sync();
    Long cnt = session.count().where(customer::id, eq(id)).sync();
    Assert.assertEquals(Long.valueOf(0), cnt);
  }

  @Test
  public void testListCRUID() throws TimeoutException {

    UUID id = UUID.randomUUID();

    List<String> names = new ArrayList<String>();
    names.add("Alex");
    names.add("Albert");

    // CREATE

    session.insert().value(customer::id, id).value(customer::names, names).sync();

    // READ

    // read full object

    Customer actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);

    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(names, actual.names());
    Assert.assertNull(actual.aliases());
    Assert.assertNull(actual.properties());

    // read full list

    List<String> actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst()
        .get()._1;
    Assert.assertEquals(names, actualList);

    // read single value by index

    String cql = session.select(getIdx(customer::names, 1)).where(customer::id, eq(id)).cql();

    System.out.println("Still not supporting cql = " + cql);

    // UPDATE

    List<String> expected = new ArrayList<String>();
    expected.add("unknown");

    session.update().set(customer::names, expected).where(customer::id, eq(id)).sync();

    actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);

    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(expected, actual.names());

    // INSERT

    // prepend operation

    expected.add(0, "prepend");
    session.update().prepend(customer::names, "prepend").where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // append operation

    expected.add("append");
    session.update().append(customer::names, "append").where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // prependAll operation
    expected.addAll(0, names);
    session.update().prependAll(customer::names, names).where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // appendAll operation
    expected.addAll(names);
    session.update().appendAll(customer::names, names).where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // set by Index

    expected.set(5, "inserted");
    session.update().setIdx(customer::names, 5, "inserted").where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // DELETE

    // remove single value

    expected.remove("inserted");
    session.update().discard(customer::names, "inserted").where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // remove values

    expected.removeAll(names);
    session.update().discardAll(customer::names, names).where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualList);

    // remove full list

    session.update().set(customer::names, null).where(customer::id, eq(id)).sync();

    actualList = session.select(customer::names).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertNull(actualList);

    // remove object

    session.delete().where(customer::id, eq(id)).sync();
    Long cnt = session.count().where(customer::id, eq(id)).sync();
    Assert.assertEquals(Long.valueOf(0), cnt);
  }

  @Test
  public void testMapCRUID() throws TimeoutException {

    UUID id = UUID.randomUUID();

    Map<String, String> props = new HashMap<String, String>();
    props.put("key1", "value1");
    props.put("key2", "value2");

    // CREATE

    session.insert().value(customer::id, id).value(customer::properties, props).sync();

    // READ

    // read full object

    Customer actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);

    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(props, actual.properties());
    Assert.assertNull(actual.aliases());
    Assert.assertNull(actual.names());

    // read full map

    Map<String, String> actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync()
        .findFirst().get()._1;
    Assert.assertEquals(props, actualMap);

    // read single key-value in map

    String cql = session.select(get(customer::properties, "key1")).where(customer::id, eq(id)).cql();

    System.out.println("Still not supporting cql = " + cql);

    // UPDATE

    Map<String, String> expected = new HashMap<String, String>();
    expected.put("k1", "v1");
    expected.put("k2", "v2");

    session.update().set(customer::properties, expected).where(customer::id, eq(id)).sync();

    actual = session.<Customer>select(customer).where(customer::id, eq(id)).single().sync().orElse(null);
    Assert.assertEquals(id, actual.id());
    Assert.assertEquals(expected, actual.properties());

    // INSERT

    // put operation

    expected.put("k3", "v3");
    session.update().put(customer::properties, "k3", "v3").where(customer::id, eq(id)).sync();

    actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualMap);

    // putAll operation
    expected.putAll(props);
    session.update().putAll(customer::properties, props).where(customer::id, eq(id)).sync();

    actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualMap);

    // put existing

    expected.put("k3", "v33");
    session.update().put(customer::properties, "k3", "v33").where(customer::id, eq(id)).sync();

    actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualMap);

    // DELETE

    // remove single key

    expected.remove("k3");
    session.update().put(customer::properties, "k3", null).where(customer::id, eq(id)).sync();

    actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertEquals(expected, actualMap);

    // remove full map

    session.update().set(customer::properties, null).where(customer::id, eq(id)).sync();

    actualMap = session.select(customer::properties).where(customer::id, eq(id)).sync().findFirst().get()._1;
    Assert.assertNull(actualMap);

    // remove object

    session.delete().where(customer::id, eq(id)).sync();
    Long cnt = session.count().where(customer::id, eq(id)).sync();
    Assert.assertEquals(Long.valueOf(0), cnt);
  }
}

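The test above walks every collection mutator the update builder offers. As a compact reference, here is a minimal sketch, one mutator per collection type, assuming the same Customer DSL, session, and static Query imports as CollectionTest; the method name is hypothetical and the snippet is illustrative only, not part of the patch.

  // Illustrative sketch: one mutator per collection column on Customer.
  @Test
  public void collectionMutatorsSketch() throws TimeoutException {
    UUID id = UUID.randomUUID();
    Set<String> aliases = new HashSet<String>();
    aliases.add("Alex");

    session.insert().value(customer::id, id).value(customer::aliases, aliases).sync();

    // Set column: add a single element.
    session.update().add(customer::aliases, "Albert").where(customer::id, eq(id)).sync();

    // List column: append a single element.
    session.update().append(customer::names, "Albert").where(customer::id, eq(id)).sync();

    // Map column: put a single entry; putting null for the value removes the key.
    session.update().put(customer::properties, "key1", "value1").where(customer::id, eq(id)).sync();

    // Assigning null drops the whole collection.
    session.update().set(customer::aliases, null).where(customer::id, eq(id)).sync();
  }
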
@ -15,11 +15,13 @@
 */
package net.helenus.test.integration.core.collection;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import com.datastax.driver.core.DataType.Name;

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Types;

@ -27,15 +29,15 @@ import net.helenus.mapping.annotation.Types;
@Table
public interface Customer {

  @PartitionKey
  UUID id();

  @Types.Set(Name.TEXT)
  Set<String> aliases();

  @Types.List(Name.TEXT)
  List<String> names();

  @Types.Map(key = Name.TEXT, value = Name.TEXT)
  Map<String, String> properties();
}

@ -17,92 +17,89 @@ package net.helenus.test.integration.core.compound;

import java.util.Date;
import java.util.UUID;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Operator;
import net.helenus.core.Query;
import net.helenus.support.Mutable;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class CompondKeyTest extends AbstractEmbeddedCassandraTest {

  Timeline timeline;

  HelenusSession session;

  @Before
  public void beforeTest() {
    session = Helenus.init(getSession()).showCql().add(Timeline.class).autoCreateDrop().get();
    timeline = Helenus.dsl(Timeline.class, session.getMetadata());
  }

  @Test
  public void test() throws Exception {

    UUID userId = UUID.randomUUID();
    long postTime = System.currentTimeMillis() - 100000L;

    session.showCql(false);

    for (int i = 0; i != 100; ++i) {

      TimelineImpl post = new TimelineImpl();
      post.userId = userId;
      post.timestamp = new Date(postTime + 1000L * i);
      post.text = "hello";

      session.upsert(post).sync();
    }

    session.showCql(true);

    final Mutable<Date> d = new Mutable<Date>(null);
    final Mutable<Integer> c = new Mutable<Integer>(0);

    session.select(timeline::userId, timeline::timestamp, timeline::text)
        .where(timeline::userId, Operator.EQ, userId).orderBy(Query.desc(timeline::timestamp)).limit(5).sync()
        .forEach(t -> {

          // System.out.println(t);
          c.set(c.get() + 1);

          Date cd = d.get();
          if (cd != null) {
            Assert.assertTrue(cd.after(t._2));
          }
          d.set(t._2);
        });

    Assert.assertEquals(Integer.valueOf(5), c.get());
  }

  public static class TimelineImpl implements Timeline {

    UUID userId;
    Date timestamp;
    String text;

    @Override
    public UUID userId() {
      return userId;
    }

    @Override
    public Date timestamp() {
      return timestamp;
    }

    @Override
    public String text() {
      return text;
    }
  }
}

@ -17,22 +17,19 @@ package net.helenus.test.integration.core.compound;

import java.util.Date;
import java.util.UUID;

import net.helenus.mapping.annotation.*;

@Table
public interface Timeline {

  @PartitionKey(ordinal = 0)
  UUID userId();

  @ClusteringColumn(ordinal = 1)
  @Types.Timeuuid
  Date timestamp();

  @Column(ordinal = 2)
  String text();
}

@ -18,46 +18,46 @@ package net.helenus.test.integration.core.counter;
import static net.helenus.core.Query.eq;

import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class CounterTest extends AbstractEmbeddedCassandraTest {

  static Page page;

  static HelenusSession session;

  @BeforeClass
  public static void beforeTest() {
    session = Helenus.init(getSession()).showCql().add(Page.class).autoCreateDrop().get();
    page = Helenus.dsl(Page.class, session.getMetadata());
  }

  @Test
  public void testPrint() {
    System.out.println(page);
  }

  @Test
  public void testCounter() throws TimeoutException {

    boolean exists = session.select(page::hits).where(page::alias, eq("index")).sync().findFirst().isPresent();
    Assert.assertFalse(exists);

    session.update().increment(page::hits, 10L).where(page::alias, eq("index")).sync();

    long hits = session.select(page::hits).where(page::alias, eq("index")).sync().findFirst().get()._1;
    Assert.assertEquals(10, hits);

    session.update().decrement(page::hits).where(page::alias, eq("index")).sync();

    hits = session.select(page::hits).where(page::alias, eq("index")).sync().findFirst().get()._1;
    Assert.assertEquals(9, hits);
  }
}

@ -22,9 +22,9 @@ import net.helenus.mapping.annotation.Types;
@Table
public interface Page {

  @PartitionKey
  String alias();

  @Types.Counter
  long hits();
}

@ -15,75 +15,100 @@
 */
package net.helenus.test.integration.core.draft;

import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import static net.helenus.core.Query.eq;

public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {

  static Supply supply;
  static HelenusSession session;
  static Supply.Draft draft = null;

  @BeforeClass
  public static void beforeTest() throws TimeoutException {
    session = Helenus.init(getSession()).showCql().add(Supply.class).autoCreateDrop().get();
    supply = session.dsl(Supply.class);

    draft = Supply.draft("APAC").code("WIDGET-002").description("Our second Widget!")
        .demand(new HashMap<String, Long>() {
          {
            put("APAC", 100L);
            put("EMEA", 10000L);
            put("NORAM", 2000000L);
          }
        }).shipments(new HashSet<String>() {
          {
            add("HMS Puddle in transit to APAC, 100 units.");
            add("Frigate Jimmy in transit to EMEA, 10000 units.");
          }
        }).suppliers(new ArrayList<String>() {
          {
            add("Puddle, Inc.");
            add("Jimmy Town, LTD.");
          }
        });

    Supply s1 = session.<Supply>insert(draft).sync();
  }

  @Test
  public void testFoo() throws Exception {

    Supply s1 = session.<Supply>select(Supply.class).where(supply::id, eq(draft.id()))
        .single()
        .sync()
        .orElse(null);

    // List
    Supply s2 = session.<Supply>update(s1.update()).prepend(supply::suppliers, "Pignose Supply, LLC.").sync();
    Assert.assertEquals(s2.suppliers().get(0), "Pignose Supply, LLC.");

    // Set
    String shipment = "Pignose, on the way! (1M units)";
    Supply s3 = session.<Supply>update(s2.update()).add(supply::shipments, shipment).sync();
    Assert.assertTrue(s3.shipments().contains(shipment));

    // Map
    Supply s4 = session.<Supply>update(s3.update()).put(supply::demand, "NORAM", 10L).sync();
    Assert.assertEquals((long) s4.demand().get("NORAM"), 10L);
  }

  @Test
  public void testSerialization() throws Exception {
    Supply s1, s2;

    s1 = session.<Supply>select(Supply.class).where(supply::id, eq(draft.id()))
        .single()
        .sync()
        .orElse(null);

    byte[] data;
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutput out = new ObjectOutputStream(bos)) {
      out.writeObject(s1);
      out.flush();
      data = bos.toByteArray();
    }

    try (ByteArrayInputStream bis = new ByteArrayInputStream(data);
        ObjectInput in = new ObjectInputStream(bis)) {
      s2 = (Supply) in.readObject();
    }

    Assert.assertEquals(s2.id(), s1.id());
    Assert.assertEquals(s2, s1);
  }
}

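The flow above is the general entity/draft shape: build a Draft, insert it, re-select the entity, then call update() to obtain a fresh Draft for each mutation. A minimal sketch, assuming the same Supply DSL, session, and static imports as EntityDraftBuilderTest; the code and description values are hypothetical and the snippet is illustrative only.

  // Illustrative sketch: mirrors the flow exercised in EntityDraftBuilderTest.
  Supply.Draft d = Supply.draft("APAC").code("WIDGET-003").description("Another widget");
  session.<Supply>insert(d).sync();

  Supply loaded = session.<Supply>select(Supply.class)
      .where(supply::id, eq(d.id())).single().sync().orElse(null);

  // update() hands back a Draft wrapping the loaded entity; each sync() returns the refreshed entity.
  Supply updated = session.<Supply>update(loaded.update())
      .prepend(supply::suppliers, "Pignose Supply, LLC.").sync();
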
@ -1,6 +1,7 @@
package net.helenus.test.integration.core.draft;

import java.util.UUID;

import net.helenus.core.AbstractAuditedEntityDraft;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.MapExportable;

@ -9,85 +10,85 @@ import net.helenus.mapping.annotation.*;
@Table
public interface Inventory {

  static Inventory inventory = Helenus.dsl(Inventory.class);

  @Transient
  static Draft draft(UUID id) {
    return new Draft(id);
  }

  @PartitionKey
  UUID id();

  @Column("emea")
  @Types.Counter
  long EMEA();

  @Column("noram")
  @Types.Counter
  long NORAM();

  @Column("apac")
  @Types.Counter
  long APAC();

  @Transient
  default Draft update() {
    return new Draft(this);
  }

  class Draft extends AbstractAuditedEntityDraft<Inventory> {

    // Entity/Draft pattern-enabling methods:
    Draft(UUID id) {
      super(null);

      // Primary Key:
      set(inventory::id, id);
    }

    Draft(Inventory inventory) {
      super((MapExportable) inventory);
    }

    public Class<Inventory> getEntityClass() {
      return Inventory.class;
    }

    protected String getCurrentAuditor() {
      return "unknown";
    }

    // Immutable properties:
    public UUID id() {
      return this.<UUID>get(inventory::id, UUID.class);
    }

    public long EMEA() {
      return this.<Long>get(inventory::EMEA, long.class);
    }

    public Draft EMEA(long count) {
      mutate(inventory::EMEA, count);
      return this;
    }

    public long APAC() {
      return this.<Long>get(inventory::APAC, long.class);
    }

    public Draft APAC(long count) {
      mutate(inventory::APAC, count);
      return this;
    }

    public long NORAM() {
      return this.<Long>get(inventory::NORAM, long.class);
    }

    public Draft NORAM(long count) {
      mutate(inventory::NORAM, count);
      return this;
    }
  }
}

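Inventory follows the same entity/draft shape as Supply, but its Draft extends AbstractAuditedEntityDraft, which adds the getCurrentAuditor() hook. A minimal sketch of staging counter values on a Draft through the fluent mutators declared above; the id and counts are hypothetical and the snippet is illustrative only.

  // Illustrative sketch: stage values on a Draft using only the methods declared above.
  Inventory.Draft d = Inventory.draft(UUID.randomUUID()).EMEA(10L).APAC(10L).NORAM(20L);
  long staged = d.EMEA(); // reads back the value staged by the EMEA(long) mutator
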
@ -1,10 +1,12 @@
package net.helenus.test.integration.core.draft;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import com.datastax.driver.core.utils.UUIDs;

import net.helenus.core.AbstractEntityDraft;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.MapExportable;

@ -13,133 +15,133 @@ import net.helenus.mapping.annotation.*;
@Table
public interface Supply {

  static Supply supply = Helenus.dsl(Supply.class);

  @Transient
  static Draft draft(String region) {
    return new Draft(region);
  }

  @PartitionKey
  UUID id();

  @ClusteringColumn(ordinal = 0)
  default String region() {
    return "NORAM";
  }

  @Index(caseSensitive = false)
  String code();

  @Index
  String description(); // @IndexText == lucene index

  @Index
  Map<String, Long> demand();

  @Index
  List<String> suppliers();

  @Index
  Set<String> shipments();

  @Transient
  default Draft update() {
    return new Draft(this);
  }

  class Draft extends AbstractEntityDraft<Supply> {

    // Entity/Draft pattern-enabling methods:
    Draft(String region) {
      super(null);

      // Primary Key:
      set(supply::id, UUIDs.timeBased());
      set(supply::region, region);
    }

    Draft(Supply supply) {
      super((MapExportable) supply);
    }

    public Class<Supply> getEntityClass() {
      return Supply.class;
    }

    // Immutable properties:
    public UUID id() {
      return this.<UUID>get(supply::id, UUID.class);
    }

    public String region() {
      return this.<String>get(supply::region, String.class);
    }

    // Mutable properties:
    public String code() {
      return this.<String>get(supply::code, String.class);
    }

    public Draft code(String code) {
      mutate(supply::code, code);
      return this;
    }

    public Draft setCode(String code) {
      return code(code);
    }

    public String description() {
      return this.<String>get(supply::description, String.class);
    }

    public Draft description(String description) {
      mutate(supply::description, description);
      return this;
    }

    public Draft setDescription(String description) {
      return description(description);
    }

    public Map<String, Long> demand() {
      return this.<Map<String, Long>>get(supply::demand, Map.class);
    }

    public Draft demand(Map<String, Long> demand) {
      mutate(supply::demand, demand);
      return this;
    }

    public Draft setDemand(Map<String, Long> demand) {
      return demand(demand);
    }

    public List<String> suppliers() {
      return this.<List<String>>get(supply::suppliers, List.class);
    }

    public Draft suppliers(List<String> suppliers) {
      mutate(supply::suppliers, suppliers);
      return this;
    }

    public Draft setSuppliers(List<String> suppliers) {
      return suppliers(suppliers);
    }

    public Set<String> shipments() {
      return this.<Set<String>>get(supply::shipments, Set.class);
    }

    public Draft shipments(Set<String> shipments) {
      mutate(supply::shipments, shipments);
      return this;
    }

    public Draft setshipments(Set<String> shipments) {
      return shipments(shipments);
    }
  }
}

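Every mutable property on Supply.Draft pairs a fluent mutator with a JavaBean-style alias that simply delegates to it (setCode calls code, setDemand calls demand, and so on), so both spellings stage the same change. A short sketch with hypothetical values, illustrative only:

  // Illustrative sketch: the two calls are equivalent because setCode(...) just returns code(...).
  Supply.Draft d = Supply.draft("EMEA");
  d.code("WIDGET-009");     // fluent form
  d.setCode("WIDGET-009");  // bean-style alias
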
@ -23,17 +23,17 @@ import net.helenus.mapping.annotation.Transient;
@InheritedTable
public interface Animal {

  @PartitionKey(ordinal = 0)
  int id();

  @Column(ordinal = 1)
  boolean eatable();

  @Column
  boolean warmBlodded();

  @Transient
  default Animal me() {
    return this;
  }
}

@ -22,7 +22,7 @@ import net.helenus.mapping.annotation.Table;
@Table("cats")
public interface Cat extends Mammal {

  @Column(ordinal = 0)
  @Index(caseSensitive = false)
  String nickname();
}

@ -5,73 +5,59 @@ import static net.helenus.core.Query.eq;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class HierarchyTest extends AbstractEmbeddedCassandraTest {

  static Cat cat;

  static Pig pig;

  static HelenusSession session;

  static Random rnd = new Random();

  @BeforeClass
  public static void beforeTest() {
    session = Helenus.init(getSession()).showCql().add(Cat.class).add(Pig.class).autoCreateDrop().get();
    cat = Helenus.dsl(Cat.class);
    pig = Helenus.dsl(Pig.class);
  }

  @Test
  public void testPrint() {
    System.out.println(cat);
  }

  @Test
  public void testCounter() throws TimeoutException {

    session.insert().value(cat::id, rnd.nextInt()).value(cat::nickname, "garfield").value(cat::eatable, false)
        .sync();
    session.insert().value(pig::id, rnd.nextInt()).value(pig::nickname, "porky").value(pig::eatable, true).sync();

    Optional<Cat> animal = session.<Cat>select(Cat.class).where(cat::nickname, eq("garfield")).sync().findFirst();
    Assert.assertTrue(animal.isPresent());
    Assert.assertTrue(animal.get().warmBlodded());
    Assert.assertFalse(animal.get().eatable());
  }

  @Test
  public void testDefaultMethod() throws TimeoutException {
    session.insert().value(cat::id, rnd.nextInt()).value(cat::nickname, "garfield").value(cat::eatable, false)
        .sync();
    Optional<Cat> animal = session.select(Cat.class).where(cat::nickname, eq("garfield")).single().sync();
    Assert.assertTrue(animal.isPresent());

    Cat cat = animal.get();
    Animal itsme = cat.me();
    Assert.assertEquals(cat, itsme);
  }
}

@@ -20,7 +20,7 @@ import net.helenus.mapping.annotation.InheritedTable;
@InheritedTable
public interface Mammal extends Animal {

default boolean warmBlodded() {
return true;
}
}

@@ -21,6 +21,6 @@ import net.helenus.mapping.annotation.Table;
@Table("pigs")
public interface Pig extends Mammal {

@Column(ordinal = 0)
String nickname();
}

@@ -23,13 +23,13 @@ import net.helenus.mapping.annotation.Table;
@Table("books")
public interface Book {

@PartitionKey(ordinal = 0)
long id();

@Column(ordinal = 1)
@Index
String isbn();

@Column(ordinal = 2)
String author();
}

@@ -16,39 +16,35 @@
package net.helenus.test.integration.core.index;

import java.util.concurrent.TimeoutException;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class SecondaryIndexTest extends AbstractEmbeddedCassandraTest {

Book book;

HelenusSession session;

@Before
public void beforeTest() {
session = Helenus.init(getSession()).showCql().add(Book.class).autoCreateDrop().get();
book = Helenus.dsl(Book.class, session.getMetadata());
}

@Test
public void test() throws TimeoutException {

session.insert().value(book::id, 123L).value(book::isbn, "ABC").value(book::author, "Alex").sync();

long actualId = session.select(book::id).where(book::isbn, Query.eq("ABC")).sync().findFirst().get()._1;

Assert.assertEquals(123L, actualId);
}
}

@@ -16,6 +16,7 @@
package net.helenus.test.integration.core.prepared;

import java.math.BigDecimal;

import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@@ -24,13 +25,13 @@ import net.helenus.mapping.annotation.Table;
@Cacheable
public interface Car {

@PartitionKey(ordinal = 0)
String make();

@PartitionKey(ordinal = 1)
String model();

int year();

BigDecimal price();
}

@@ -15,8 +15,14 @@
*/
package net.helenus.test.integration.core.prepared;

import com.datastax.driver.core.ResultSet;
import java.math.BigDecimal;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.datastax.driver.core.ResultSet;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;

@@ -24,115 +30,86 @@ import net.helenus.core.operation.PreparedOperation;
import net.helenus.core.operation.PreparedStreamOperation;
import net.helenus.support.Fun;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class PreparedStatementTest extends AbstractEmbeddedCassandraTest {

static Car car;

static HelenusSession session;

static PreparedOperation<ResultSet> insertOp;

static PreparedOperation<ResultSet> updateOp;

static PreparedStreamOperation<Car> selectOp;

static PreparedStreamOperation<Fun.Tuple1<BigDecimal>> selectPriceOp;

static PreparedOperation<ResultSet> deleteOp;

static PreparedOperation<Long> countOp;

@BeforeClass
public static void beforeTest() {

session = Helenus.init(getSession()).showCql().add(Car.class).autoCreateDrop().get();
car = Helenus.dsl(Car.class, session.getMetadata());

insertOp = session.<ResultSet>insert().value(car::make, Query.marker()).value(car::model, Query.marker())
.value(car::year, 2004).prepare();

updateOp = session.update().set(car::price, Query.marker()).where(car::make, Query.eq(Query.marker()))
.and(car::model, Query.eq(Query.marker())).prepare();

selectOp = session.<Car>select(car).where(car::make, Query.eq(Query.marker()))
.and(car::model, Query.eq(Query.marker())).prepare();

selectPriceOp = session.select(car::price).where(car::make, Query.eq(Query.marker()))
.and(car::model, Query.eq(Query.marker())).prepare();

deleteOp = session.delete().where(car::make, Query.eq(Query.marker())).and(car::model, Query.eq(Query.marker()))
.prepare();

countOp = session.count().where(car::make, Query.eq(Query.marker())).and(car::model, Query.eq(Query.marker()))
.prepare();
}

@Test
public void testPrint() {
System.out.println(car);
}

@Test
public void testCRUID() throws Exception {

// INSERT

insertOp.bind("Nissan", "350Z").sync();

// SELECT

Car actual = selectOp.bind("Nissan", "350Z").sync().findFirst().get();
Assert.assertEquals("Nissan", actual.make());
Assert.assertEquals("350Z", actual.model());
Assert.assertEquals(2004, actual.year());
Assert.assertNull(actual.price());

// UPDATE

updateOp.bind(BigDecimal.valueOf(10000.0), "Nissan", "350Z").sync();

BigDecimal price = selectPriceOp.bind("Nissan", "350Z").sync().findFirst().get()._1;

Assert.assertEquals(BigDecimal.valueOf(10000.0), price);

// DELETE

Long cnt = countOp.bind("Nissan", "350Z").sync();
Assert.assertEquals(Long.valueOf(1), cnt);

deleteOp.bind("Nissan", "350Z").sync();

cnt = countOp.bind("Nissan", "350Z").sync();
Assert.assertEquals(Long.valueOf(0), cnt);
}
}

@@ -18,48 +18,49 @@ package net.helenus.test.integration.core.simple;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.operation.InsertOperation;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.operation.InsertOperation;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class InsertPartialTest extends AbstractEmbeddedCassandraTest {

static HelenusSession session;
static User user;
static Random rnd = new Random();

@BeforeClass
public static void beforeTests() {
session = Helenus.init(getSession()).showCql().add(User.class).autoCreateDrop().get();
user = Helenus.dsl(User.class);
}

@Test
public void testPartialInsert() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
Long id = rnd.nextLong();
map.put("id", id);
map.put("age", 5);
InsertOperation<User> insert = session.<User>insert(Helenus.map(User.class, map));
String cql = "INSERT INTO simple_users (id,age) VALUES (" + id.toString() + ",5) IF NOT EXISTS;";
Assert.assertEquals(cql, insert.cql());
insert.sync();
}

@Test
public void testPartialUpsert() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
Long id = rnd.nextLong();
map.put("id", id);
map.put("age", 5);
InsertOperation upsert = session.upsert(Helenus.map(User.class, map));
String cql = "INSERT INTO simple_users (id,age) VALUES (" + id.toString() + ",5);";
Assert.assertEquals(cql, upsert.cql());
upsert.sync();
}
}

@@ -16,29 +16,25 @@
package net.helenus.test.integration.core.simple;

import java.util.Date;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.StaticColumn;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Types;

import net.helenus.mapping.annotation.*;

@Table
public interface Message {

@PartitionKey
int id();

@ClusteringColumn
@Types.Timeuuid
Date timestamp();

@StaticColumn(forceQuote = true)
String from();

@Column(forceQuote = true)
String to();

@Column
String message();
}

@@ -17,242 +17,173 @@ package net.helenus.test.integration.core.simple;

import static net.helenus.core.Query.eq;

import com.datastax.driver.core.ResultSet;
import java.util.*;
import java.util.Optional;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.datastax.driver.core.ResultSet;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Operator;
import net.helenus.core.operation.UpdateOperation;
import net.helenus.support.Fun;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class SimpleUserTest extends AbstractEmbeddedCassandraTest {

static User user;

static HelenusSession session;

@BeforeClass
public static void beforeTest() {
session = Helenus.init(getSession()).showCql().add(User.class).autoCreateDrop().get();
user = Helenus.dsl(User.class, session.getMetadata());
}

@Test
public void testCruid() throws Exception {

UserImpl newUser = new UserImpl();
newUser.id = 100L;
newUser.name = "alex";
newUser.age = 34;
newUser.type = UserType.USER;

// CREATE

session.upsert(newUser).sync();

// READ

// select row and map to entity

User actual = session.selectAll(User.class).mapTo(User.class).where(user::id, eq(100L)).sync().findFirst()
.get();
assertUsers(newUser, actual);

// select as object

actual = session.<User>select(user).where(user::id, eq(100L)).single().sync().orElse(null);
assertUsers(newUser, actual);

// select by columns

actual = session.select().column(user::id).column(user::name).column(user::age).column(user::type)
.mapTo(User.class).where(user::id, eq(100L)).sync().findFirst().get();
assertUsers(newUser, actual);

// select by columns

actual = session.select(User.class).mapTo(User.class).where(user::id, eq(100L)).sync().findFirst().get();
assertUsers(newUser, actual);

// select as object and mapTo

actual = session.select(user::id, user::name, user::age, user::type).mapTo(User.class).where(user::id, eq(100L))
.sync().findFirst().get();
assertUsers(newUser, actual);

// select single column

String name = session.select(user::name).where(user::id, eq(100L)).sync().findFirst().get()._1;

Assert.assertEquals(newUser.name(), name);

// select single column in array tuple

name = (String) session.select().column(user::name).where(user::id, eq(100L)).sync().findFirst().get()._a[0];

Assert.assertEquals(newUser.name(), name);

// UPDATE

session.update(user::name, "albert").set(user::age, 35).where(user::id, Operator.EQ, 100L).sync();

long cnt = session.count(user).where(user::id, Operator.EQ, 100L).sync();
Assert.assertEquals(1L, cnt);

name = session.select(user::name).where(user::id, Operator.EQ, 100L).map(t -> "_" + t._1).sync().findFirst()
.get();

Assert.assertEquals("_albert", name);

User u2 = session.<User>select(user).where(user::id, eq(100L)).single().sync().orElse(null);

Assert.assertEquals(Long.valueOf(100L), u2.id());
Assert.assertEquals("albert", u2.name());
Assert.assertEquals(Integer.valueOf(35), u2.age());

//
User greg = session.<User>insert(user).value(user::name, "greg").value(user::age, 44)
.value(user::type, UserType.USER).value(user::id, 1234L).sync();

Optional<User> maybeGreg = session.<User>select(user).where(user::id, eq(1234L)).single().sync();

// INSERT

session.update().set(user::name, null).set(user::age, null).set(user::type, null).where(user::id, eq(100L))
.zipkinContext(null).sync();

Fun.Tuple3<String, Integer, UserType> tuple = session.select(user::name, user::age, user::type)
.where(user::id, eq(100L)).sync().findFirst().get();

Assert.assertNull(tuple._1);
Assert.assertNull(tuple._2);
Assert.assertNull(tuple._3);

// DELETE

session.delete(user).where(user::id, eq(100L)).sync();

cnt = session.select().count().where(user::id, eq(100L)).sync();
Assert.assertEquals(0L, cnt);
}

public void testZipkin() throws TimeoutException {
session.update().set(user::name, null).set(user::age, null).set(user::type, null).where(user::id, eq(100L))
.zipkinContext(null).sync();

UpdateOperation<ResultSet> update = session.update();
update.set(user::name, null).zipkinContext(null).sync();
}

private void assertUsers(User expected, User actual) {
Assert.assertEquals(expected.id(), actual.id());
Assert.assertEquals(expected.name(), actual.name());
Assert.assertEquals(expected.age(), actual.age());
Assert.assertEquals(expected.type(), actual.type());
}

public static class UserImpl implements User {

Long id;
String name;
Integer age;
UserType type;

@Override
public Long id() {
return id;
}

@Override
public String name() {
return name;
}

@Override
public Integer age() {
return age;
}

@Override
public UserType type() {
return type;
}
}
}

@@ -19,152 +19,129 @@ import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class StaticColumnTest extends AbstractEmbeddedCassandraTest {

static HelenusSession session;
static Message message;

@BeforeClass
public static void beforeTest() {
session = Helenus.init(getSession()).showCql().addPackage(Message.class.getPackage().getName()).autoCreateDrop()
.get();
message = Helenus.dsl(Message.class, session.getMetadata());
}

@Test
public void testPrint() {
System.out.println(message);
}

@Test
public void testCRUID() throws TimeoutException {

MessageImpl msg = new MessageImpl();
msg.id = 123;
msg.timestamp = new Date();
msg.from = "Alex";
msg.to = "Bob";
msg.msg = "hi";

// CREATE

session.insert(msg).sync();

msg.id = 123;
msg.to = "Craig";

session.insert(msg).sync();

// READ

List<Message> actual = session.<Message>select(message).where(message::id, Query.eq(123)).sync()
.collect(Collectors.toList());

Assert.assertEquals(2, actual.size());

Message toCraig = actual.stream().filter(m -> m.to().equals("Craig")).findFirst().get();
assertMessages(msg, toCraig);

// UPDATE

session.update().set(message::from, "Albert").where(message::id, Query.eq(123))
.onlyIf(message::from, Query.eq("Alex")).sync();

long cnt = session.select(message::from).where(message::id, Query.eq(123)).sync()
.filter(t -> t._1.equals("Albert")).count();

Assert.assertEquals(2, cnt);

// INSERT

session.update().set(message::from, null).where(message::id, Query.eq(123)).sync();

session.select(message::from).where(message::id, Query.eq(123)).sync().map(t -> t._1)
.forEach(Assert::assertNull);

session.update().set(message::from, "Alex").where(message::id, Query.eq(123))
.onlyIf(message::from, Query.eq(null)).sync();

// DELETE

session.delete().where(message::id, Query.eq(123)).sync();

cnt = session.count().where(message::id, Query.eq(123)).sync();
Assert.assertEquals(0, cnt);
}

private void assertMessages(Message expected, Message actual) {
Assert.assertEquals(expected.id(), actual.id());
Assert.assertEquals(expected.from(), actual.from());
Assert.assertEquals(expected.timestamp(), actual.timestamp());
Assert.assertEquals(expected.to(), actual.to());
Assert.assertEquals(expected.message(), actual.message());
}

private static class MessageImpl implements Message {

int id;
Date timestamp;
String from;
String to;
String msg;

@Override
public int id() {
return id;
}

@Override
public Date timestamp() {
return timestamp;
}

@Override
public String from() {
return from;
}

@Override
public String to() {
return to;
}

@Override
public String message() {
return msg;
}
}
}

@@ -15,7 +15,6 @@
*/
package net.helenus.test.integration.core.simple;

import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@@ -23,14 +22,14 @@ import net.helenus.mapping.annotation.Table;
@Table("simple_users")
public interface User {

@PartitionKey
Long id();

@Username
@Column("override_name")
String name();

Integer age();

UserType type();
}

@@ -16,6 +16,5 @@
package net.helenus.test.integration.core.simple;

public enum UserType {
USER,
ADMIN;
USER, ADMIN;
}

@@ -15,15 +15,13 @@
*/
package net.helenus.test.integration.core.simple;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.*;

import net.helenus.mapping.annotation.Constraints;

@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraints.LowerCase
public @interface Username {}
public @interface Username {
}

@@ -17,6 +17,7 @@ package net.helenus.test.integration.core.tuple;

import com.datastax.driver.core.DataType;
import com.datastax.driver.core.TupleValue;

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@@ -25,12 +26,12 @@ import net.helenus.mapping.annotation.Types;
@Table
public interface Album {

@PartitionKey(ordinal = 1)
int id();

AlbumInformation info();

@Types.Tuple({DataType.Name.TEXT, DataType.Name.TEXT})
@Column(ordinal = 1)
TupleValue infoNoMapping();
}

@@ -21,9 +21,9 @@ import net.helenus.mapping.annotation.Tuple;
@Tuple
public interface AlbumInformation {

@Column(ordinal = 0)
String about();

@Column(ordinal = 1)
String place();
}

@@ -15,20 +15,21 @@
*/
package net.helenus.test.integration.core.tuple;

import net.helenus.core.Helenus;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class DslTest extends AbstractEmbeddedCassandraTest {

@Test
public void testDslBeforeSessionInit() {
Assert.assertNotNull(Helenus.dsl(Album.class));
}

@Test
public void testSessionInitAddingDslProxy() {
Assert.assertNotNull(Helenus.init(getSession()).showCql().add(Helenus.dsl(Album.class)));
}
}

@@ -16,129 +16,108 @@
package net.helenus.test.integration.core.tuple;

import java.util.concurrent.TimeoutException;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class InnerTupleTest extends AbstractEmbeddedCassandraTest {

static PhotoAlbum photoAlbum;

static HelenusSession session;

@BeforeClass
public static void beforeTest() {
session = Helenus.init(getSession()).showCql().add(PhotoAlbum.class).autoCreateDrop().get();
photoAlbum = Helenus.dsl(PhotoAlbum.class, session.getMetadata());
}

@Test
public void testPrint() {
System.out.println(photoAlbum);
}

@Test
public void testCruid() throws TimeoutException {

Photo photo = new Photo() {

@Override
public byte[] blob() {
return "jpeg".getBytes();
}
};

PhotoFolder folder = new PhotoFolder() {

@Override
public String name() {
return "first";
}

@Override
public Photo photo() {
return photo;
}
};

// CREATE (C)

session.insert().value(photoAlbum::id, 123).value(photoAlbum::folder, folder).sync();

// READ (R)

PhotoFolder actual = session.select(photoAlbum::folder).where(photoAlbum::id, Query.eq(123)).sync().findFirst()
.get()._1;

Assert.assertEquals(folder.name(), actual.name());

// UPDATE (U)

// unfortunately this is not working right now in Cassandra, can not update a
// single column in tuple :(
// session.update()
// .set(photoAlbum.folder().photo()::blob, "Helenus".getBytes())
// .where(photoAlbum::id, eq(123))
// .sync();

PhotoFolder expected = new PhotoFolder() {

@Override
public String name() {
return "seconds";
}

@Override
public Photo photo() {
return photo;
}
};

session.update().set(photoAlbum::folder, expected).where(photoAlbum::id, Query.eq(123)).sync();

actual = session.select(photoAlbum::folder).where(photoAlbum::id, Query.eq(123)).sync().findFirst().get()._1;

Assert.assertEquals(expected.name(), actual.name());

// INSERT (I)
// let's insert null ;)

session.update().set(photoAlbum::folder, null).where(photoAlbum::id, Query.eq(123)).sync();

actual = session.select(photoAlbum::folder).where(photoAlbum::id, Query.eq(123)).sync().findFirst().get()._1;
Assert.assertNull(actual);

// DELETE (D)
session.delete().where(photoAlbum::id, Query.eq(123)).sync();

long cnt = session.select(photoAlbum::folder).where(photoAlbum::id, Query.eq(123)).sync().count();
Assert.assertEquals(0, cnt);
}
}

@@ -20,5 +20,5 @@ import net.helenus.mapping.annotation.Tuple;
@Tuple
public interface Photo {

byte[] blob();
}

@@ -21,8 +21,8 @@ import net.helenus.mapping.annotation.Table;
@Table
public interface PhotoAlbum {

@PartitionKey
int id();

PhotoFolder folder();
}

@@ -21,9 +21,9 @@ import net.helenus.mapping.annotation.Tuple;
@Tuple
public interface PhotoFolder {

@Column(ordinal = 0)
String name();

@Column(ordinal = 1)
Photo photo();
}

@@ -17,157 +17,155 @@ package net.helenus.test.integration.core.tuple;

import static net.helenus.core.Query.eq;

import com.datastax.driver.core.DataType;
import com.datastax.driver.core.TupleType;
import com.datastax.driver.core.TupleValue;
import java.util.concurrent.TimeoutException;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.datastax.driver.core.DataType;
import com.datastax.driver.core.TupleType;
import com.datastax.driver.core.TupleValue;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class TupleTest extends AbstractEmbeddedCassandraTest {

static Album album;

static HelenusSession session;

@BeforeClass
public static void beforeTest() {
Helenus.clearDslCache();
session = Helenus.init(getSession()).showCql().add(Album.class).autoCreateDrop().get();
album = Helenus.dsl(Album.class, session.getMetadata());
}

@Test
public void testPrint() {
System.out.println(album);
}

@Test
public void testCruid() throws TimeoutException {

AlbumInformation info = new AlbumInformation() {

@Override
public String about() {
return "Cassandra";
}

@Override
public String place() {
return "San Jose";
}
};

// CREATE (C)

session.insert().value(album::id, 123).value(album::info, info).sync();

// READ (R)

AlbumInformation actual = session.select(album::info).where(album::id, eq(123)).sync().findFirst().get()._1;

Assert.assertEquals(info.about(), actual.about());
Assert.assertEquals(info.place(), actual.place());

// UPDATE (U)

// unfortunately this is not working right now in Cassandra, can not update a
// single column in tuple :(
// session.update()
// .set(album.info()::about, "Helenus")
// .where(album::id, eq(123))
// .sync();

AlbumInformation expected = new AlbumInformation() {

@Override
public String about() {
return "Helenus";
}

@Override
public String place() {
return "Santa Cruz";
}
};

session.update().set(album::info, expected).where(album::id, eq(123)).sync();

actual = session.select(album::info).where(album::id, eq(123)).sync().findFirst().get()._1;

Assert.assertEquals(expected.about(), actual.about());
Assert.assertEquals(expected.place(), actual.place());

// INSERT (I)
// let's insert null ;)

session.update().set(album::info, null).where(album::id, eq(123)).sync();

actual = session.select(album::info).where(album::id, eq(123)).sync().findFirst().get()._1;
Assert.assertNull(actual);

// DELETE (D)
session.delete().where(album::id, eq(123)).sync();

long cnt = session.select(album::info).where(album::id, eq(123)).sync().count();
Assert.assertEquals(0, cnt);
}

@Test
public void testNoMapping() throws TimeoutException {

TupleType tupleType = session.getMetadata().newTupleType(DataType.text(), DataType.text());
TupleValue info = tupleType.newValue();

info.setString(0, "Cassandra");
info.setString(1, "San Jose");

// CREATE (C)

session.insert().value(album::id, 555).value(album::infoNoMapping, info).sync();

// READ (R)

TupleValue actual = session.select(album::infoNoMapping).where(album::id, eq(555)).sync().findFirst().get()._1;

Assert.assertEquals(info.getString(0), actual.getString(0));
Assert.assertEquals(info.getString(1), actual.getString(1));

// UPDATE (U)

TupleValue expected = tupleType.newValue();

expected.setString(0, "Helenus");
expected.setString(1, "Los Altos");

session.update().set(album::infoNoMapping, expected).where(album::id, eq(555)).sync();

actual = session.select(album::infoNoMapping).where(album::id, eq(555)).sync().findFirst().get()._1;

Assert.assertEquals(expected.getString(0), actual.getString(0));
Assert.assertEquals(expected.getString(1), actual.getString(1));

// INSERT (I)
// let's insert null ;)

session.update().set(album::infoNoMapping, null).where(album::id, eq(555)).sync();

actual = session.select(album::infoNoMapping).where(album::id, eq(555)).sync().findFirst().get()._1;
Assert.assertNull(actual);

// DELETE (D)
session.delete().where(album::id, eq(555)).sync();

long cnt = session.select(album::infoNoMapping).where(album::id, eq(555)).sync().count();
Assert.assertEquals(0, cnt);
}
}

@@ -21,9 +21,9 @@ import net.helenus.mapping.annotation.Tuple;
@Tuple
public interface Author {

@Column(ordinal = 0)
String name();

@Column(ordinal = 1)
String city();
}

@@ -18,22 +18,23 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
public interface Book {

@PartitionKey
int id();

List<Author> authors();

Set<Author> reviewers();

Map<Integer, Section> contents();

Map<Section, String> notes();

Map<Section, Author> writers();
}

@@ -21,9 +21,9 @@ import net.helenus.mapping.annotation.Tuple;
@Tuple
public interface Section {

    @Column(ordinal = 0)
    String title();

    @Column(ordinal = 1)
    int page();
}

@@ -15,124 +15,138 @@
 */
package net.helenus.test.integration.core.tuplecollection;

import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public abstract class TupleCollectionTest extends AbstractEmbeddedCassandraTest {

    static Book book;

    static HelenusSession session;

    @BeforeClass
    public static void beforeTest() {
        session = Helenus.init(getSession()).showCql().add(Book.class).autoCreateDrop().get();
        book = Helenus.dsl(Book.class, session.getMetadata());
    }

    @Test
    public void test() {
        System.out.println(book);
    }

    public static final class AuthorImpl implements Author {

        String name;
        String city;

        AuthorImpl(String name, String city) {
            this.name = name;
            this.city = city;
        }

        @Override
        public String name() {
            return name;
        }

        @Override
        public String city() {
            return city;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((city == null) ? 0 : city.hashCode());
            result = prime * result + ((name == null) ? 0 : name.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            AuthorImpl other = (AuthorImpl) obj;
            if (city == null) {
                if (other.city != null)
                    return false;
            } else if (!city.equals(other.city))
                return false;
            if (name == null) {
                if (other.name != null)
                    return false;
            } else if (!name.equals(other.name))
                return false;
            return true;
        }

        @Override
        public String toString() {
            return "AuthorImpl [name=" + name + ", city=" + city + "]";
        }
    }

    public static final class SectionImpl implements Section {

        String title;
        int page;

        SectionImpl(String title, int page) {
            this.title = title;
            this.page = page;
        }

        @Override
        public String title() {
            return title;
        }

        @Override
        public int page() {
            return page;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + page;
            result = prime * result + ((title == null) ? 0 : title.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            SectionImpl other = (SectionImpl) obj;
            if (page != other.page)
                return false;
            if (title == null) {
                if (other.title != null)
                    return false;
            } else if (!title.equals(other.title))
                return false;
            return true;
        }

        @Override
        public String toString() {
            return "SectionImpl [title=" + title + ", page=" + page + "]";
        }
    }
}

@@ -18,130 +18,123 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class TupleKeyMapTest extends TupleCollectionTest {

    @Test
    public void testKeyMapCRUID() throws TimeoutException {

        int id = 888;

        Map<Section, String> notes = new HashMap<Section, String>();
        notes.put(new SectionImpl("first", 1), "value1");
        notes.put(new SectionImpl("second", 2), "value2");

        // CREATE

        session.insert().value(book::id, id).value(book::notes, notes).sync();

        // READ

        // read full object

        Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualMaps(notes, actual.notes());
        Assert.assertNull(actual.reviewers());
        Assert.assertNull(actual.writers());
        Assert.assertNull(actual.contents());

        // read full map

        Map<Section, String> actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst()
                .get()._1;
        assertEqualMaps(notes, actualMap);

        // read single key-value in map

        String cql = session.select(Query.get(book::notes, new SectionImpl("first", 1))).where(book::id, Query.eq(id))
                .cql();

        System.out.println("Still not supporting cql = " + cql);

        // UPDATE

        Map<Section, String> expected = new HashMap<Section, String>();
        expected.put(new SectionImpl("f", 1), "v1");
        expected.put(new SectionImpl("s", 1), "v2");

        session.update().set(book::notes, expected).where(book::id, Query.eq(id)).sync();

        actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualMaps(expected, actual.notes());

        // INSERT

        // put operation

        Section third = new SectionImpl("t", 3);

        expected.put(third, "v3");
        session.update().put(book::notes, third, "v3").where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // putAll operation
        expected.putAll(notes);
        session.update().putAll(book::notes, notes).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // put existing

        expected.put(third, "v33");
        session.update().put(book::notes, third, "v33").where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // DELETE

        // remove single key

        expected.remove(third);
        session.update().put(book::notes, third, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // remove full map

        session.update().set(book::notes, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::notes).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        Assert.assertNull(actualMap);

        // remove object

        session.delete().where(book::id, Query.eq(id)).sync();
        Long cnt = session.count().where(book::id, Query.eq(id)).sync();
        Assert.assertEquals(Long.valueOf(0), cnt);
    }

    private void assertEqualMaps(Map<Section, String> expected, Map<Section, String> actual) {

        Assert.assertEquals(expected.size(), actual.size());

        for (Section e : expected.keySet()) {
            Section a = actual.keySet().stream().filter(p -> p.title().equals(e.title())).findFirst().get();
            Assert.assertEquals(e.title(), a.title());
            Assert.assertEquals(e.page(), a.page());
            Assert.assertEquals(expected.get(e), actual.get(a));
        }
    }
}

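A note on the two delete idioms in TupleKeyMapTest above: put(column, key, null) removes a single entry from the map column, while set(column, null) drops the whole collection. A minimal sketch reusing the test's book DSL, id, and third key (assumed context from the test, not new API):

    // Remove one map entry (names come from the test above).
    session.update().put(book::notes, third, null).where(book::id, Query.eq(id)).sync();
    // Remove the entire map column.
    session.update().set(book::notes, null).where(book::id, Query.eq(id)).sync();
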
@@ -18,157 +18,145 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class TupleListTest extends TupleCollectionTest {

    @Test
    public void testListCRUID() throws TimeoutException {

        int id = 777;

        List<Author> authors = new ArrayList<Author>();
        authors.add(new AuthorImpl("Alex", "San Jose"));
        authors.add(new AuthorImpl("Bob", "San Francisco"));

        // CREATE

        session.insert().value(book::id, id).value(book::authors, authors).sync();

        // READ

        // read full object

        Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualLists(authors, actual.authors());
        Assert.assertNull(actual.reviewers());
        Assert.assertNull(actual.contents());

        // read full list

        List<Author> actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst()
                .get()._1;
        assertEqualLists(authors, actualList);

        // read single value by index

        String cql = session.select(Query.getIdx(book::authors, 1)).where(book::id, Query.eq(id)).cql();

        System.out.println("Still not supporting cql = " + cql);

        // UPDATE

        List<Author> expected = new ArrayList<Author>();
        expected.add(new AuthorImpl("Unknown", "City 17"));

        session.update().set(book::authors, expected).where(book::id, Query.eq(id)).sync();

        actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualLists(expected, actual.authors());

        // INSERT

        // prepend operation

        expected.add(0, new AuthorImpl("Prepend", "PrependCity"));
        session.update().prepend(book::authors, new AuthorImpl("Prepend", "PrependCity")).where(book::id, Query.eq(id))
                .sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // append operation

        expected.add(new AuthorImpl("Append", "AppendCity"));
        session.update().append(book::authors, new AuthorImpl("Append", "AppendCity")).where(book::id, Query.eq(id))
                .sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // prependAll operation
        expected.addAll(0, authors);
        session.update().prependAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // appendAll operation
        expected.addAll(authors);
        session.update().appendAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // set by Index

        Author inserted = new AuthorImpl("Insert", "InsertCity");
        expected.set(5, inserted);
        session.update().setIdx(book::authors, 5, inserted).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // DELETE

        // remove single value

        expected.remove(inserted);
        session.update().discard(book::authors, inserted).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // remove values

        expected.removeAll(authors);
        session.update().discardAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualLists(expected, actualList);

        // remove full list

        session.update().set(book::authors, null).where(book::id, Query.eq(id)).sync();

        actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        Assert.assertNull(actualList);

        // remove object

        session.delete().where(book::id, Query.eq(id)).sync();
        Long cnt = session.count().where(book::id, Query.eq(id)).sync();
        Assert.assertEquals(Long.valueOf(0), cnt);
    }

    private void assertEqualLists(List<Author> expected, List<Author> actual) {
        Assert.assertEquals(expected.size(), actual.size());

        int size = expected.size();

        for (int i = 0; i != size; ++i) {
            Author e = expected.get(i);
            Author a = actual.get(i);
            Assert.assertEquals(e.name(), a.name());
            Assert.assertEquals(e.city(), a.city());
        }
    }
}

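Grouped together, the list mutation verbs exercised by TupleListTest above look like the sketch below. It reuses the test's book DSL, session, and id; the Author value itself is hypothetical.

    Author a = new AuthorImpl("Ada", "London");                                          // hypothetical value
    session.update().prepend(book::authors, a).where(book::id, Query.eq(id)).sync();     // insert at head
    session.update().append(book::authors, a).where(book::id, Query.eq(id)).sync();      // insert at tail
    session.update().setIdx(book::authors, 0, a).where(book::id, Query.eq(id)).sync();   // replace by index
    session.update().discard(book::authors, a).where(book::id, Query.eq(id)).sync();     // remove matching value
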
@@ -18,140 +18,130 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class TupleMapTest extends TupleCollectionTest {

    @Test
    public void testMapCRUID() throws TimeoutException {

        int id = 333;

        Map<Section, Author> writers = new HashMap<Section, Author>();
        writers.put(new SectionImpl("first", 1), new TupleCollectionTest.AuthorImpl("Alex", "San Jose"));
        writers.put(new SectionImpl("second", 2), new TupleCollectionTest.AuthorImpl("Bob", "San Francisco"));

        // CREATE

        session.insert().value(book::id, id).value(book::writers, writers).sync();

        // READ

        // read full object

        Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).single().sync().orElse(null);
        Assert.assertEquals(id, actual.id());
        assertEqualMaps(writers, actual.writers());
        Assert.assertNull(actual.reviewers());
        Assert.assertNull(actual.notes());
        Assert.assertNull(actual.contents());

        // read full map

        Map<Section, Author> actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst()
                .get()._1;
        assertEqualMaps(writers, actualMap);

        // read single key-value in map

        String cql = session.select(Query.get(book::writers, new SectionImpl("first", 1))).where(book::id, Query.eq(id))
                .cql();

        System.out.println("Still not supporting cql = " + cql);

        // UPDATE

        Map<Section, Author> expected = new HashMap<Section, Author>();
        expected.put(new SectionImpl("f", 1), new TupleCollectionTest.AuthorImpl("A", "SJ"));
        expected.put(new SectionImpl("s", 1), new TupleCollectionTest.AuthorImpl("B", "SF"));

        session.update().set(book::writers, expected).where(book::id, Query.eq(id)).sync();

        actual = session.<Book>select(book).where(book::id, Query.eq(id)).single().sync().orElse(null);

        Assert.assertEquals(id, actual.id());
        assertEqualMaps(expected, actual.writers());

        // INSERT

        // put operation

        Section third = new SectionImpl("t", 3);
        Author unk = new TupleCollectionTest.AuthorImpl("Unk", "City 17");

        expected.put(third, unk);
        session.update().put(book::writers, third, unk).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // putAll operation
        expected.putAll(writers);
        session.update().putAll(book::writers, writers).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // put existing

        expected.put(third, unk);
        session.update().put(book::writers, third, unk).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // DELETE

        // remove single key

        expected.remove(third);
        session.update().put(book::writers, third, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // remove full map

        session.update().set(book::writers, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        Assert.assertNull(actualMap);

        // remove object

        session.delete().where(book::id, Query.eq(id)).sync();
        Long cnt = session.count().where(book::id, Query.eq(id)).sync();
        Assert.assertEquals(Long.valueOf(0), cnt);
    }

    private void assertEqualMaps(Map<Section, Author> expected, Map<Section, Author> actual) {

        Assert.assertEquals(expected.size(), actual.size());

        for (Section e : expected.keySet()) {
            Section a = actual.keySet().stream().filter(p -> p.title().equals(e.title())).findFirst().get();
            Assert.assertEquals(e.title(), a.title());
            Assert.assertEquals(e.page(), a.page());

            Author ea = expected.get(e);
            Author aa = actual.get(a);

            Assert.assertEquals(ea.name(), aa.name());
            Assert.assertEquals(ea.city(), aa.city());
        }
    }
}

@@ -18,107 +18,100 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class TupleSetTest extends TupleCollectionTest {

    @Test
    public void testSetCRUID() throws TimeoutException {

        int id = 555;

        // CREATE

        Set<Author> reviewers = new HashSet<Author>();
        reviewers.add(new AuthorImpl("Alex", "San Jose"));
        reviewers.add(new AuthorImpl("Bob", "San Francisco"));

        session.insert().value(book::id, id).value(book::reviewers, reviewers).sync();

        // READ

        Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualSets(reviewers, actual.reviewers());

        // UPDATE

        Set<Author> expected = new HashSet<Author>();
        expected.add(new AuthorImpl("Craig", "Los Altos"));

        session.update().set(book::reviewers, expected).where(book::id, Query.eq(id)).sync();

        Set<Author> actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst()
                .get()._1;
        assertEqualSets(expected, actualSet);

        // add operation

        expected.add(new AuthorImpl("Add", "AddCity"));
        session.update().add(book::reviewers, new AuthorImpl("Add", "AddCity")).where(book::id, Query.eq(id)).sync();

        actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualSets(expected, actualSet);

        // addAll operation
        expected.addAll(reviewers);
        session.update().addAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();

        actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualSets(expected, actualSet);

        // DELETE

        // remove single value

        Author a = expected.stream().filter(p -> p.name().equals("Add")).findFirst().get();
        expected.remove(a);

        session.update().remove(book::reviewers, a).where(book::id, Query.eq(id)).sync();

        actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualSets(expected, actualSet);

        // remove values

        expected.remove(expected.stream().filter(p -> p.name().equals("Alex")).findFirst().get());
        expected.remove(expected.stream().filter(p -> p.name().equals("Bob")).findFirst().get());
        session.update().removeAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();

        actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualSets(expected, actualSet);

        // remove full list

        session.update().set(book::reviewers, null).where(book::id, Query.eq(id)).sync();

        actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        Assert.assertNull(actualSet);

        // remove object

        session.delete().where(book::id, Query.eq(id)).sync();
        Long cnt = session.count().where(book::id, Query.eq(id)).sync();
        Assert.assertEquals(Long.valueOf(0), cnt);
    }

    private void assertEqualSets(Set<Author> expected, Set<Author> actual) {
        Assert.assertEquals(expected.size(), actual.size());

        for (Author e : expected) {
            Author a = actual.stream().filter(p -> p.name().equals(e.name())).findFirst().get();
            Assert.assertEquals(e.city(), a.city());
        }
    }
}

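TupleSetTest above covers the corresponding set verbs; side by side they read as below (same assumptions as the sketch after TupleListTest: book, session, id, and reviewers come from the test, the single Author value is hypothetical).

    Author a = new AuthorImpl("Ada", "London");                                                   // hypothetical value
    session.update().add(book::reviewers, a).where(book::id, Query.eq(id)).sync();                // add one element
    session.update().addAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();     // add many
    session.update().remove(book::reviewers, a).where(book::id, Query.eq(id)).sync();             // remove one element
    session.update().removeAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();  // remove many
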
@@ -18,126 +18,123 @@ package net.helenus.test.integration.core.tuplecollection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class TupleValueMapTest extends TupleCollectionTest {

    @Test
    public void testValueMapCRUID() throws TimeoutException {

        int id = 999;

        Map<Integer, Section> contents = new HashMap<Integer, Section>();
        contents.put(1, new SectionImpl("first", 1));
        contents.put(2, new SectionImpl("second", 2));

        // CREATE

        session.insert().value(book::id, id).value(book::contents, contents).sync();

        // READ

        // read full object

        Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualMaps(contents, actual.contents());
        Assert.assertNull(actual.reviewers());
        Assert.assertNull(actual.writers());
        Assert.assertNull(actual.notes());

        // read full map

        Map<Integer, Section> actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync()
                .findFirst().get()._1;
        assertEqualMaps(contents, actualMap);

        // read single key-value in map

        String cql = session.select(Query.get(book::contents, 1)).where(book::id, Query.eq(id)).cql();

        System.out.println("Still not supporting cql = " + cql);

        // UPDATE

        Map<Integer, Section> expected = new HashMap<Integer, Section>();
        expected.put(4, new SectionImpl("4", 4));
        expected.put(5, new SectionImpl("5", 5));

        session.update().set(book::contents, expected).where(book::id, Query.eq(id)).sync();

        actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
        Assert.assertEquals(id, actual.id());
        assertEqualMaps(expected, actual.contents());

        // INSERT

        // put operation

        Section third = new SectionImpl("t", 3);

        expected.put(3, third);
        session.update().put(book::contents, 3, third).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // putAll operation
        expected.putAll(contents);
        session.update().putAll(book::contents, contents).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // put existing

        third = new SectionImpl("t-replace", 3);
        expected.put(3, third);
        session.update().put(book::contents, 3, third).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // DELETE

        // remove single key

        expected.remove(3);
        session.update().put(book::contents, 3, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        assertEqualMaps(expected, actualMap);

        // remove full map

        session.update().set(book::contents, null).where(book::id, Query.eq(id)).sync();

        actualMap = session.select(book::contents).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
        Assert.assertNull(actualMap);

        // remove object

        session.delete().where(book::id, Query.eq(id)).sync();
        Long cnt = session.count().where(book::id, Query.eq(id)).sync();
        Assert.assertEquals(Long.valueOf(0), cnt);
    }

    private void assertEqualMaps(Map<Integer, Section> expected, Map<Integer, Section> actual) {

        Assert.assertEquals(expected.size(), actual.size());

        for (Integer i : expected.keySet()) {
            Section e = expected.get(i);
            Section a = actual.get(i);
            Assert.assertEquals(e.title(), a.title());
            Assert.assertEquals(e.page(), a.page());
        }
    }
}

@@ -20,7 +20,7 @@ import net.helenus.mapping.annotation.UDT;
@UDT
public interface Author {

    String name();

    String city();
}

@@ -18,22 +18,23 @@ package net.helenus.test.integration.core.udtcollection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
public interface Book {

    @PartitionKey
    int id();

    List<Author> authors();

    Set<Author> reviewers();

    Map<Integer, Section> contents();

    Map<Section, String> notes();

    Map<Section, Author> writers();
}

@@ -20,7 +20,7 @@ import net.helenus.mapping.annotation.UDT;
@UDT
public interface Section {

    String title();

    int page();
}

@ -15,125 +15,139 @@
|
|||
*/
|
||||
package net.helenus.test.integration.core.udtcollection;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
|
||||
public abstract class UDTCollectionTest extends AbstractEmbeddedCassandraTest {

	static Book book;

	static HelenusSession session;

	@BeforeClass
	public static void beforeTest() {
		Helenus.clearDslCache();
		session = Helenus.init(getSession()).showCql().add(Book.class).autoCreateDrop().get();
		book = Helenus.dsl(Book.class);
	}

	@Test
	public void test() {
		System.out.println(book);
	}

	public static final class AuthorImpl implements Author {

		String name;
		String city;

		AuthorImpl(String name, String city) {
			this.name = name;
			this.city = city;
		}

		@Override
		public String name() {
			return name;
		}

		@Override
		public String city() {
			return city;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + ((city == null) ? 0 : city.hashCode());
			result = prime * result + ((name == null) ? 0 : name.hashCode());
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			AuthorImpl other = (AuthorImpl) obj;
			if (city == null) {
				if (other.city != null)
					return false;
			} else if (!city.equals(other.city))
				return false;
			if (name == null) {
				if (other.name != null)
					return false;
			} else if (!name.equals(other.name))
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "AuthorImpl [name=" + name + ", city=" + city + "]";
		}
	}

	public static final class SectionImpl implements Section {

		String title;
		int page;

		SectionImpl(String title, int page) {
			this.title = title;
			this.page = page;
		}

		@Override
		public String title() {
			return title;
		}

		@Override
		public int page() {
			return page;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + page;
			result = prime * result + ((title == null) ? 0 : title.hashCode());
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			SectionImpl other = (SectionImpl) obj;
			if (page != other.page)
				return false;
			if (title == null) {
				if (other.title != null)
					return false;
			} else if (!title.equals(other.title))
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "SectionImpl [title=" + title + ", page=" + page + "]";
		}
	}
}

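// For reference only: the Book entity exercised by these tests is defined outside this hunk. A minimal sketch of
// what it is assumed to look like, reconstructed from the getters and collection types used in the subclasses
// below (the annotations are assumptions, not taken from this diff):
//
//     @Table
//     public interface Book {
//
//         @PartitionKey
//         int id();
//
//         List<Author> authors();
//         Set<Author> reviewers();
//         Map<Section, Author> writers();
//         Map<Section, String> notes();
//         Map<Integer, Section> contents();
//     }
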
@@ -21,121 +21,120 @@ import static net.helenus.core.Query.get;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

public class UDTKeyMapTest extends UDTCollectionTest {

	@Test
	public void testKeyMapCRUID() throws TimeoutException {

		int id = 888;

		Map<Section, String> notes = new HashMap<Section, String>();
		notes.put(new SectionImpl("first", 1), "value1");
		notes.put(new SectionImpl("second", 2), "value2");

		// CREATE

		session.insert().value(book::id, id).value(book::notes, notes).sync();

		// READ

		// read full object

		Book actual = session.<Book>select(book).where(book::id, eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(notes, actual.notes());
		Assert.assertNull(actual.reviewers());
		Assert.assertNull(actual.writers());
		Assert.assertNull(actual.contents());

		// read full map

		Map<Section, String> actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst()
				.get()._1;
		assertEqualMaps(notes, actualMap);

		// read single key-value in map

		String cql = session.select(get(book::notes, new SectionImpl("first", 1))).where(book::id, eq(id)).cql();

		System.out.println("Still not supporting cql = " + cql);

		// UPDATE

		Map<Section, String> expected = new HashMap<Section, String>();
		expected.put(new SectionImpl("f", 1), "v1");
		expected.put(new SectionImpl("s", 1), "v2");

		session.update().set(book::notes, expected).where(book::id, eq(id)).sync();

		actual = session.<Book>select(book).where(book::id, eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(expected, actual.notes());

		// INSERT

		// put operation

		Section third = new SectionImpl("t", 3);

		expected.put(third, "v3");
		session.update().put(book::notes, third, "v3").where(book::id, eq(id)).sync();

		actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// putAll operation
		expected.putAll(notes);
		session.update().putAll(book::notes, notes).where(book::id, eq(id)).sync();

		actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// put existing

		expected.put(third, "v33");
		session.update().put(book::notes, third, "v33").where(book::id, eq(id)).sync();

		actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// DELETE

		// remove single key

		expected.remove(third);
		session.update().put(book::notes, third, null).where(book::id, eq(id)).sync();

		actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// remove full map

		session.update().set(book::notes, null).where(book::id, eq(id)).sync();

		actualMap = session.select(book::notes).where(book::id, eq(id)).sync().findFirst().get()._1;
		Assert.assertNull(actualMap);

		// remove object

		session.delete().where(book::id, eq(id)).sync();
		Long cnt = session.count().where(book::id, eq(id)).sync();
		Assert.assertEquals(Long.valueOf(0), cnt);
	}

	private void assertEqualMaps(Map<Section, String> expected, Map<Section, String> actual) {

		Assert.assertEquals(expected.size(), actual.size());

		for (Section e : expected.keySet()) {
			Section a = actual.keySet().stream().filter(p -> p.title().equals(e.title())).findFirst().get();
			Assert.assertEquals(e.title(), a.title());
			Assert.assertEquals(e.page(), a.page());
			Assert.assertEquals(expected.get(e), actual.get(a));
		}
	}
}

@@ -18,157 +18,145 @@ package net.helenus.test.integration.core.udtcollection;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class UDTListTest extends UDTCollectionTest {

	@Test
	public void testListCRUID() throws TimeoutException {

		int id = 777;

		List<Author> authors = new ArrayList<Author>();
		authors.add(new AuthorImpl("Alex", "San Jose"));
		authors.add(new AuthorImpl("Bob", "San Francisco"));

		// CREATE

		session.insert().value(book::id, id).value(book::authors, authors).sync();

		// READ

		// read full object

		Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualLists(authors, actual.authors());
		Assert.assertNull(actual.reviewers());
		Assert.assertNull(actual.contents());

		// read full list

		List<Author> actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst()
				.get()._1;
		assertEqualLists(authors, actualList);

		// read single value by index

		String cql = session.select(Query.getIdx(book::authors, 1)).where(book::id, Query.eq(id)).cql();

		System.out.println("Still not supporting cql = " + cql);

		// UPDATE

		List<Author> expected = new ArrayList<Author>();
		expected.add(new AuthorImpl("Unknown", "City 17"));

		session.update().set(book::authors, expected).where(book::id, Query.eq(id)).sync();

		actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualLists(expected, actual.authors());

		// INSERT

		// prepend operation

		expected.add(0, new AuthorImpl("Prepend", "PrependCity"));
		session.update().prepend(book::authors, new AuthorImpl("Prepend", "PrependCity")).where(book::id, Query.eq(id))
				.sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// append operation

		expected.add(new AuthorImpl("Append", "AppendCity"));
		session.update().append(book::authors, new AuthorImpl("Append", "AppendCity")).where(book::id, Query.eq(id))
				.sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// prependAll operation
		expected.addAll(0, authors);
		session.update().prependAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// appendAll operation
		expected.addAll(authors);
		session.update().appendAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// set by Index

		Author inserted = new AuthorImpl("Insert", "InsertCity");
		expected.set(5, inserted);
		session.update().setIdx(book::authors, 5, inserted).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// DELETE

		// remove single value

		expected.remove(inserted);
		session.update().discard(book::authors, inserted).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// remove values

		expected.removeAll(authors);
		session.update().discardAll(book::authors, authors).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualLists(expected, actualList);

		// remove full list

		session.update().set(book::authors, null).where(book::id, Query.eq(id)).sync();

		actualList = session.select(book::authors).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		Assert.assertNull(actualList);

		// remove object

		session.delete().where(book::id, Query.eq(id)).sync();
		Long cnt = session.count().where(book::id, Query.eq(id)).sync();
		Assert.assertEquals(Long.valueOf(0), cnt);
	}

	private void assertEqualLists(List<Author> expected, List<Author> actual) {
		Assert.assertEquals(expected.size(), actual.size());

		int size = expected.size();

		for (int i = 0; i != size; ++i) {
			Author e = expected.get(i);
			Author a = actual.get(i);
			Assert.assertEquals(e.name(), a.name());
			Assert.assertEquals(e.city(), a.city());
		}
	}
}

@@ -18,136 +18,129 @@ package net.helenus.test.integration.core.udtcollection;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class UDTMapTest extends UDTCollectionTest {

	@Test
	public void testMapCRUID() throws TimeoutException {

		int id = 333;

		Map<Section, Author> writers = new HashMap<Section, Author>();
		writers.put(new SectionImpl("first", 1), new AuthorImpl("Alex", "San Jose"));
		writers.put(new SectionImpl("second", 2), new AuthorImpl("Bob", "San Francisco"));

		// CREATE

		session.insert().value(book::id, id).value(book::writers, writers).sync();

		// READ

		// read full object

		Book actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(writers, actual.writers());
		Assert.assertNull(actual.reviewers());
		Assert.assertNull(actual.notes());
		Assert.assertNull(actual.contents());

		// read full map

		Map<Section, Author> actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst()
				.get()._1;
		assertEqualMaps(writers, actualMap);

		// read single key-value in map

		String cql = session.select(Query.get(book::writers, new SectionImpl("first", 1))).where(book::id, Query.eq(id))
				.cql();

		System.out.println("Still not supporting cql = " + cql);

		// UPDATE

		Map<Section, Author> expected = new HashMap<Section, Author>();
		expected.put(new SectionImpl("f", 1), new AuthorImpl("A", "SJ"));
		expected.put(new SectionImpl("s", 1), new AuthorImpl("B", "SF"));

		session.update().set(book::writers, expected).where(book::id, Query.eq(id)).sync();

		actual = session.<Book>select(book).where(book::id, Query.eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(expected, actual.writers());

		// INSERT

		// put operation

		Section third = new SectionImpl("t", 3);
		Author unk = new AuthorImpl("Unk", "City 17");

		expected.put(third, unk);
		session.update().put(book::writers, third, unk).where(book::id, Query.eq(id)).sync();

		actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// putAll operation
		expected.putAll(writers);
		session.update().putAll(book::writers, writers).where(book::id, Query.eq(id)).sync();

		actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// put existing

		expected.put(third, unk);
		session.update().put(book::writers, third, unk).where(book::id, Query.eq(id)).sync();

		actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// DELETE

		// remove single key

		expected.remove(third);
		session.update().put(book::writers, third, null).where(book::id, Query.eq(id)).sync();

		actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// remove full map

		session.update().set(book::writers, null).where(book::id, Query.eq(id)).sync();

		actualMap = session.select(book::writers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		Assert.assertNull(actualMap);

		// remove object

		session.delete().where(book::id, Query.eq(id)).sync();
		Long cnt = session.count().where(book::id, Query.eq(id)).sync();
		Assert.assertEquals(Long.valueOf(0), cnt);
	}

	private void assertEqualMaps(Map<Section, Author> expected, Map<Section, Author> actual) {

		Assert.assertEquals(expected.size(), actual.size());

		for (Section e : expected.keySet()) {
			Section a = actual.keySet().stream().filter(p -> p.title().equals(e.title())).findFirst().get();
			Assert.assertEquals(e.title(), a.title());
			Assert.assertEquals(e.page(), a.page());

			Author ea = expected.get(e);
			Author aa = actual.get(a);

			Assert.assertEquals(ea.name(), aa.name());
			Assert.assertEquals(ea.city(), aa.city());
		}
	}
}

@@ -18,107 +18,100 @@ package net.helenus.test.integration.core.udtcollection;

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

import net.helenus.core.Query;

public class UDTSetTest extends UDTCollectionTest {

	@Test
	public void testSetCRUID() throws TimeoutException {

		int id = 555;

		// CREATE

		Set<Author> reviewers = new HashSet<Author>();
		reviewers.add(new AuthorImpl("Alex", "San Jose"));
		reviewers.add(new AuthorImpl("Bob", "San Francisco"));

		session.insert().value(book::id, id).value(book::reviewers, reviewers).sync();

		// READ

		Book actual = session.select(Book.class).where(book::id, Query.eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualSets(reviewers, actual.reviewers());

		// UPDATE

		Set<Author> expected = new HashSet<Author>();
		expected.add(new AuthorImpl("Craig", "Los Altos"));

		session.update().set(book::reviewers, expected).where(book::id, Query.eq(id)).sync();

		Set<Author> actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst()
				.get()._1;
		assertEqualSets(expected, actualSet);

		// add operation

		expected.add(new AuthorImpl("Add", "AddCity"));
		session.update().add(book::reviewers, new AuthorImpl("Add", "AddCity")).where(book::id, Query.eq(id)).sync();

		actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualSets(expected, actualSet);

		// addAll operation
		expected.addAll(reviewers);
		session.update().addAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();

		actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualSets(expected, actualSet);

		// DELETE

		// remove single value

		Author a = expected.stream().filter(p -> p.name().equals("Add")).findFirst().get();
		expected.remove(a);

		session.update().remove(book::reviewers, a).where(book::id, Query.eq(id)).sync();

		actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualSets(expected, actualSet);

		// remove values

		expected.remove(expected.stream().filter(p -> p.name().equals("Alex")).findFirst().get());
		expected.remove(expected.stream().filter(p -> p.name().equals("Bob")).findFirst().get());
		session.update().removeAll(book::reviewers, reviewers).where(book::id, Query.eq(id)).sync();

		actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		assertEqualSets(expected, actualSet);

		// remove full list

		session.update().set(book::reviewers, null).where(book::id, Query.eq(id)).sync();

		actualSet = session.select(book::reviewers).where(book::id, Query.eq(id)).sync().findFirst().get()._1;
		Assert.assertNull(actualSet);

		// remove object

		session.delete().where(book::id, Query.eq(id)).sync();
		Long cnt = session.count().where(book::id, Query.eq(id)).sync();
		Assert.assertEquals(Long.valueOf(0), cnt);
	}

	private void assertEqualSets(Set<Author> expected, Set<Author> actual) {
		Assert.assertEquals(expected.size(), actual.size());

		for (Author e : expected) {
			Author a = actual.stream().filter(p -> p.name().equals(e.name())).findFirst().get();
			Assert.assertEquals(e.city(), a.city());
		}
	}
}

@@ -21,120 +21,121 @@ import static net.helenus.core.Query.get;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

public class UDTValueMapTest extends UDTCollectionTest {

	@Test
	public void testValueMapCRUID() throws TimeoutException {

		int id = 999;

		Map<Integer, Section> contents = new HashMap<Integer, Section>();
		contents.put(1, new SectionImpl("first", 1));
		contents.put(2, new SectionImpl("second", 2));

		// CREATE

		session.insert().value(book::id, id).value(book::contents, contents).sync();

		// READ

		// read full object

		Book actual = session.select(Book.class).where(book::id, eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(contents, actual.contents());
		Assert.assertNull(actual.reviewers());
		Assert.assertNull(actual.writers());
		Assert.assertNull(actual.notes());

		// read full map

		Map<Integer, Section> actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst()
				.get()._1;
		assertEqualMaps(contents, actualMap);

		// read single key-value in map

		String cql = session.select(get(book::contents, 1)).where(book::id, eq(id)).cql();

		System.out.println("Still not supporting cql = " + cql);

		// UPDATE

		Map<Integer, Section> expected = new HashMap<Integer, Section>();
		expected.put(4, new SectionImpl("4", 4));
		expected.put(5, new SectionImpl("5", 5));

		session.update().set(book::contents, expected).where(book::id, eq(id)).sync();

		actual = session.select(Book.class).where(book::id, eq(id)).sync().findFirst().get();
		Assert.assertEquals(id, actual.id());
		assertEqualMaps(expected, actual.contents());

		// INSERT

		// put operation

		Section third = new SectionImpl("t", 3);

		expected.put(3, third);
		session.update().put(book::contents, 3, third).where(book::id, eq(id)).sync();

		actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// putAll operation
		expected.putAll(contents);
		session.update().putAll(book::contents, contents).where(book::id, eq(id)).sync();

		actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// put existing

		third = new SectionImpl("t-replace", 3);
		expected.put(3, third);
		session.update().put(book::contents, 3, third).where(book::id, eq(id)).sync();

		actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// DELETE

		// remove single key

		expected.remove(3);
		session.update().put(book::contents, 3, null).where(book::id, eq(id)).sync();

		actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst().get()._1;
		assertEqualMaps(expected, actualMap);

		// remove full map

		session.update().set(book::contents, null).where(book::id, eq(id)).sync();

		actualMap = session.select(book::contents).where(book::id, eq(id)).sync().findFirst().get()._1;
		Assert.assertNull(actualMap);

		// remove object

		session.delete().where(book::id, eq(id)).sync();
		Long cnt = session.count().where(book::id, eq(id)).sync();
		Assert.assertEquals(Long.valueOf(0), cnt);
	}

	private void assertEqualMaps(Map<Integer, Section> expected, Map<Integer, Section> actual) {

		Assert.assertEquals(expected.size(), actual.size());

		for (Integer i : expected.keySet()) {
			Section e = expected.get(i);
			Section a = actual.get(i);
			Assert.assertEquals(e.title(), a.title());
			Assert.assertEquals(e.page(), a.page());
		}
	}
}

@@ -18,133 +18,112 @@ package net.helenus.test.integration.core.unitofwork;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class AndThenOrderTest extends AbstractEmbeddedCassandraTest {

	static HelenusSession session;

	@BeforeClass
	public static void beforeTest() {
		session = Helenus.init(getSession()).showCql().autoCreateDrop().get();
	}

	@Test
	public void testAndThenOrdering() throws Exception {
		List<String> q = new ArrayList<String>(5);
		UnitOfWork uow1, uow2, uow3, uow4, uow5;

		uow5 = session.begin();
		uow3 = session.begin(uow5);
		uow1 = session.begin(uow3);
		uow1.commit().andThen(() -> {
			q.add("1");
		});
		uow2 = session.begin(uow3);
		uow2.commit().andThen(() -> {
			q.add("2");
		});
		uow3.commit().andThen(() -> {
			q.add("3");
		});
		uow4 = session.begin(uow5);
		uow4.commit().andThen(() -> {
			q.add("4");
		});
		uow5.commit().andThen(() -> {
			q.add("5");
		});

		System.out.println(q);
		Assert.assertTrue(Arrays.equals(q.toArray(new String[5]), new String[]{"1", "2", "3", "4", "5"}));
	}

	@Test
	public void testExceptionWithinAndThen() throws Exception {
		List<String> q = new ArrayList<String>(5);
		UnitOfWork uow1, uow2, uow3, uow4, uow5;

		uow5 = session.begin();
		uow4 = session.begin(uow5);
		try {
			uow3 = session.begin(uow4);
			uow1 = session.begin(uow3);
			uow1.commit().andThen(() -> {
				q.add("1");
			});
			uow2 = session.begin(uow3);
			uow2.commit().andThen(() -> {
				q.add("2");
			});
			uow3.commit().andThen(() -> {
				q.add("3");
			});
			uow4.commit().andThen(() -> {
				q.add("4");
			});
			throw new Exception();
		} catch (Exception e) {
			uow4.abort();
		}
		uow5.commit().andThen(() -> {
			q.add("5");
		});

		System.out.println(q);
		Assert.assertTrue(q.isEmpty() == true);
	}

	@Test
	public void testClosableWillAbortWhenNotCommitted() throws Exception {
		UnitOfWork unitOfWork;
		try (UnitOfWork uow = session.begin()) {
			unitOfWork = uow;
			Assert.assertFalse(uow.hasAborted());
		}
		Assert.assertTrue(unitOfWork.hasAborted());
	}

	@Test
	public void testClosable() throws Exception {
		UnitOfWork unitOfWork;
		try (UnitOfWork uow = session.begin()) {
			unitOfWork = uow;
			Assert.assertFalse(uow.hasAborted());
			uow.commit().andThen(() -> {
				Assert.assertFalse(uow.hasAborted());
				Assert.assertTrue(uow.hasCommitted());
			});
		}
		Assert.assertFalse(unitOfWork.hasAborted());
		Assert.assertTrue(unitOfWork.hasCommitted());
	}
}

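// What the two ordering tests above pin down about the andThen() contract: callbacks registered on nested units of
// work are deferred until the outermost unit of work commits (hence the strict "1".."5" ordering), and none of
// them run once part of the nested chain has aborted (the queue stays empty in testExceptionWithinAndThen even
// though uow5 itself commits).
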
@@ -15,13 +15,16 @@
 */
package net.helenus.test.integration.core.unitofwork;

import java.util.Set;

import com.datastax.driver.core.DataType.Name;

import net.helenus.mapping.annotation.Types;
import net.helenus.mapping.annotation.UDT;

@UDT
public interface Directory extends FilesystemNode {

	@Types.Set(Name.TIMEUUID)
	Set<FilesystemNode> inodes();
}

@@ -15,11 +15,12 @@
 */
package net.helenus.test.integration.core.unitofwork;

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.UDT;

@UDT
public interface File extends FilesystemNode {

	@Column
	byte[] data();
}

@@ -20,5 +20,5 @@ import net.helenus.mapping.annotation.UDT;

@UDT
public interface FileAttributes {

	String owner();
}

@@ -16,17 +16,21 @@
package net.helenus.test.integration.core.unitofwork;

import java.util.UUID;

import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table("fs")
public interface FilesystemNode {

	@PartitionKey
	UUID inode();

	@ClusteringColumn
	String name();

	@Column
	FileAttributes attr();
}

@@ -17,223 +17,242 @@ package net.helenus.test.integration.core.unitofwork;

import static net.helenus.core.Query.eq;

import java.util.UUID;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import com.datastax.driver.core.utils.UUIDs;

import net.bytebuddy.utility.RandomString;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

@Table
@Cacheable
interface Widget {

	@PartitionKey
	UUID id();

	@Index
	@Constraints.Distinct()
	String name();
}

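// Note: relative to the previous revision, name() drops @Column and gains @Constraints.Distinct() while keeping
// @Index; the secondary index on name() is presumably what the select-by-name lookups in the tests below rely on.
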
public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
|
||||
|
||||
static Widget widget;
|
||||
static HelenusSession session;
|
||||
static Widget widget;
|
||||
static HelenusSession session;
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session = Helenus.init(getSession()).showCql().add(Widget.class).autoCreateDrop().get();
|
||||
widget = session.dsl(Widget.class);
|
||||
}
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session = Helenus.init(getSession()).showCql().add(Widget.class).autoCreateDrop().get();
|
||||
widget = session.dsl(Widget.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectAfterSelect() throws Exception {
|
||||
Widget w1, w2, w3;
|
||||
UUID key = UUIDs.timeBased();
|
||||
@Test
|
||||
public void testSelectAfterSelect() throws Exception {
|
||||
Widget w1, w2, w3, w4;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
// This should inserted Widget, but not cache it.
|
||||
session
|
||||
.<Widget>insert(widget)
|
||||
.value(widget::id, key)
|
||||
.value(widget::name, RandomString.make(20))
|
||||
.sync();
|
||||
// This should inserted Widget, but not cache it.
|
||||
w1 = session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync();
|
||||
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
|
||||
uow.setPurpose("testSelectAfterSelect");
|
||||
uow.setPurpose("testSelectAfterSelect");
|
||||
|
||||
// This should read from the database and return a Widget.
|
||||
w1 =
|
||||
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
|
||||
// This should read from the database and return a Widget.
|
||||
w2 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w2 =
|
||||
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w3 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
|
||||
|
||||
uow.commit()
|
||||
.andThen(
|
||||
() -> {
|
||||
uow.commit().andThen(() -> {
|
||||
Assert.assertEquals(w2, w3);
|
||||
});
|
||||
}
|
||||
|
||||
w4 = session.<Widget>select(widget).where(widget::name, eq(w1.name())).single().sync().orElse(null);
|
||||
Assert.assertEquals(w4, w1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectAfterNestedSelect() throws Exception {
|
||||
Widget w1, w2, w3, w4;
|
||||
UUID key1 = UUIDs.timeBased();
|
||||
UUID key2 = UUIDs.timeBased();
|
||||
|
||||
// This should insert the Widget, and not cache it in uow1.
|
||||
try (UnitOfWork uow1 = session.begin()) {
|
||||
w1 = session.<Widget>insert(widget).value(widget::id, key1).value(widget::name, RandomString.make(20))
|
||||
.sync(uow1);
|
||||
|
||||
try (UnitOfWork uow2 = session.begin(uow1)) {
|
||||
|
||||
// This should read from uow1's cache and return the same Widget.
|
||||
w2 = session.<Widget>select(widget).where(widget::id, eq(key1)).single().sync(uow2).orElse(null);
|
||||
|
||||
Assert.assertEquals(w1, w2);
|
||||
|
||||
w3 = session.<Widget>insert(widget).value(widget::id, key2).value(widget::name, RandomString.make(20))
|
||||
.sync(uow2);
|
||||
|
||||
uow2.commit().andThen(() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
}
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w4 = session.<Widget>select(widget).where(widget::id, eq(key2)).single().sync(uow1).orElse(null);
|
||||
|
||||
uow1.commit().andThen(() -> {
|
||||
Assert.assertEquals(w3, w4);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectViaIndexAfterSelect() throws Exception {
|
||||
Widget w1, w2;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
// This should insert and cache the Widget in the uow.
|
||||
session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync(uow);
|
||||
|
||||
// This should read from the database and return a Widget.
|
||||
w1 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w2 = session.<Widget>select(widget).where(widget::name, eq(w1.name())).single().sync(uow).orElse(null);
|
||||
|
||||
uow.commit().andThen(() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectAfterUpdated() throws Exception {
|
||||
Widget w1, w2, w3, w4, w5, w6;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
// This should insert the Widget, but not cache it.
|
||||
w1 = session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync();
|
||||
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
|
||||
// This should read from the database and return a Widget.
|
||||
w2 = session.<Widget>select(widget).where(widget::id, eq(key)).single()
|
||||
.sync(uow).orElse(null);
|
||||
Assert.assertEquals(w1, w2);
|
||||
|
||||
// This should remove the object from the cache.
|
||||
//TODO(gburd): w3 = session.
|
||||
session.<Widget>update(w2)
|
||||
.set(widget::name, "Bill")
|
||||
.where(widget::id, eq(key))
|
||||
.sync(uow);
|
||||
|
||||
// Fetch from session cache, should have old name.
|
||||
w4 = session.<Widget>select(widget).where(widget::id, eq(key)).single()
|
||||
.sync().orElse(null);
|
||||
Assert.assertEquals(w4, w2);
|
||||
Assert.assertEquals(w4.name(), w1.name());
|
||||
|
||||
// This should skip the cache.
|
||||
w5 = session.<Widget>select(widget).where(widget::id, eq(key)).single()
|
||||
.uncached()
|
||||
.sync().orElse(null);
|
||||
|
||||
Assert.assertNotEquals(w5, w2); // Not the same instance
|
||||
Assert.assertTrue(w2.equals(w5)); // But they have the same values
|
||||
Assert.assertFalse(w5.equals(w2)); // TODO(gburd): should also work
|
||||
Assert.assertEquals(w5.name(), "Bill");
|
||||
|
||||
uow.commit().andThen(() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// The name changed, so this should miss the cache and not find anything in the database.
|
||||
w6 = session.<Widget>select(widget).where(widget::name, eq(w1.name())).single()
|
||||
.sync().orElse(null);
|
||||
Assert.assertTrue(w2.equals(w5));
|
||||
}
|
||||
|
||||
w3 = session.<Widget>select(widget).where(widget::name, eq(w1.name())).single().sync().orElse(null);
|
||||
Assert.assertEquals(w1, w3);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectAfterNestedSelect() throws Exception {
|
||||
Widget w1, w2, w3, w4;
|
||||
UUID key1 = UUIDs.timeBased();
|
||||
UUID key2 = UUIDs.timeBased();
|
||||
@Test
|
||||
public void testSelectAfterDeleted() throws Exception {
|
||||
Widget w1, w2, w3, w4;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
// This should insert the Widget, and not cache it in uow1.
|
||||
try (UnitOfWork uow1 = session.begin()) {
|
||||
w1 =
|
||||
session
|
||||
.<Widget>insert(widget)
|
||||
.value(widget::id, key1)
|
||||
.value(widget::name, RandomString.make(20))
|
||||
.sync(uow1);
|
||||
// This should insert the Widget, but not cache it.
|
||||
w1 = session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync();
|
||||
|
||||
try (UnitOfWork uow2 = session.begin(uow1)) {
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
|
||||
// This should read from uow1's cache and return the same Widget.
|
||||
w2 =
|
||||
session
|
||||
.<Widget>select(widget)
|
||||
.where(widget::id, eq(key1))
|
||||
.single()
|
||||
.sync(uow2)
|
||||
.orElse(null);
|
||||
// This should read from the database and return a Widget.
|
||||
w2 = session.<Widget>select(widget).where(widget::id, eq(key)).single()
|
||||
.sync(uow).orElse(null);
|
||||
|
||||
Assert.assertEquals(w1, w2);
|
||||
// This should remove the object from the cache.
|
||||
session.delete(widget).where(widget::id, eq(key))
|
||||
.sync(uow);
|
||||
|
||||
w3 =
|
||||
session
|
||||
.<Widget>insert(widget)
|
||||
.value(widget::id, key2)
|
||||
.value(widget::name, RandomString.make(20))
|
||||
.sync(uow2);
|
||||
// This should fail to read from the cache.
|
||||
w3 = session.<Widget>select(widget).where(widget::id, eq(key)).single()
|
||||
.sync(uow).orElse(null);
|
||||
|
||||
uow2.commit()
|
||||
.andThen(
|
||||
() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
}
|
||||
Assert.assertEquals(w3, null);
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w4 =
|
||||
session
|
||||
.<Widget>select(widget)
|
||||
.where(widget::id, eq(key2))
|
||||
.single()
|
||||
.sync(uow1)
|
||||
.orElse(null);
|
||||
|
||||
uow1.commit()
|
||||
.andThen(
|
||||
() -> {
|
||||
Assert.assertEquals(w3, w4);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectViaIndexAfterSelect() throws Exception {
|
||||
Widget w1, w2;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
// This should insert and cache the Widget in the uow.
|
||||
session
|
||||
.<Widget>insert(widget)
|
||||
.value(widget::id, key)
|
||||
.value(widget::name, RandomString.make(20))
|
||||
.sync(uow);
|
||||
|
||||
// This should read from the database and return a Widget.
|
||||
w1 =
|
||||
session
|
||||
.<Widget>select(widget)
|
||||
.where(widget::id, eq(key))
|
||||
.single()
|
||||
.sync(uow)
|
||||
.orElse(null);
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w2 =
|
||||
session
|
||||
.<Widget>select(widget)
|
||||
.where(widget::name, eq(w1.name()))
|
||||
.single()
|
||||
.sync(uow)
|
||||
.orElse(null);
|
||||
|
||||
uow.commit()
|
||||
.andThen(
|
||||
() -> {
|
||||
uow.commit().andThen(() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
Assert.assertEquals(w3, null);
|
||||
});
|
||||
}
|
||||
|
||||
w4 = session.<Widget>select(widget).where(widget::name, eq(w1.name())).single()
|
||||
.sync().orElse(null);
|
||||
|
||||
Assert.assertEquals(w4, null);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@Test
|
||||
public void testSelectAfterInsertProperlyCachesEntity() throws Exception {
|
||||
Widget w1, w2, w3, w4;
|
||||
UUID key = UUIDs.timeBased();
|
||||
|
||||
try (UnitOfWork uow = session.begin()) {
|
||||
|
||||
// This should cache the inserted Widget.
|
||||
w1 = session.<Widget>insert(widget)
|
||||
.value(widget::id, key)
|
||||
.value(widget::name, RandomString.make(20))
|
||||
.sync(uow);
|
||||
|
||||
// This should read from the cache and get the same instance of a Widget.
|
||||
w2 = session.<Widget>select(widget)
|
||||
.where(widget::id, eq(key))
|
||||
.single()
|
||||
.sync(uow)
|
||||
.orElse(null);
|
||||
|
||||
uow.commit()
|
||||
.andThen(() -> {
|
||||
Assert.assertEquals(w1, w2);
|
||||
});
|
||||
}
|
||||
|
||||
// This should read the widget from the session cache and maintain object identity.
|
||||
w3 = session.<Widget>select(widget)
|
||||
.where(widget::id, eq(key))
|
||||
.single()
|
||||
.sync()
|
||||
.orElse(null);
|
||||
|
||||
Assert.assertEquals(w1, w3);
|
||||
|
||||
// This should read the widget from the database, no object identity but values should match.
|
||||
w4 = session.<Widget>select(widget)
|
||||
.where(widget::id, eq(key))
|
||||
.ignoreCache()
|
||||
.single()
|
||||
.sync()
|
||||
.orElse(null);
|
||||
|
||||
Assert.assertNotEquals(w1, w4);
|
||||
Assert.assertTrue(w1.equals(w4));
|
||||
}
|
||||
*/
|
||||
/*
|
||||
* @Test public void testSelectAfterInsertProperlyCachesEntity() throws
|
||||
* Exception { Widget w1, w2, w3, w4; UUID key = UUIDs.timeBased();
|
||||
*
|
||||
* try (UnitOfWork uow = session.begin()) {
|
||||
*
|
||||
* // This should cache the inserted Widget. w1 = session.<Widget>insert(widget)
|
||||
* .value(widget::id, key) .value(widget::name, RandomString.make(20))
|
||||
* .sync(uow);
|
||||
*
|
||||
* // This should read from the cache and get the same instance of a Widget. w2
|
||||
* = session.<Widget>select(widget) .where(widget::id, eq(key)) .single()
|
||||
* .sync(uow) .orElse(null);
|
||||
*
|
||||
* uow.commit() .andThen(() -> { Assert.assertEquals(w1, w2); }); }
|
||||
*
|
||||
* // This should read the widget from the session cache and maintain object
|
||||
* identity. w3 = session.<Widget>select(widget) .where(widget::id, eq(key))
|
||||
* .single() .sync() .orElse(null);
|
||||
*
|
||||
* Assert.assertEquals(w1, w3);
|
||||
*
|
||||
* // This should read the widget from the database, no object identity but
|
||||
* values should match. w4 = session.<Widget>select(widget) .where(widget::id,
|
||||
* eq(key)) .uncached() .single() .sync() .orElse(null);
|
||||
*
|
||||
* Assert.assertNotEquals(w1, w4); Assert.assertTrue(w1.equals(w4)); }
|
||||
*/
|
||||
}
|
||||
|
|
|
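As a side note on the tests above: here is a minimal, hypothetical sketch of the unit-of-work pattern they exercise, using only API calls that appear in this diff (session.begin(), begin(parent), sync(uow), commit().andThen(...)). The class name and literal values are illustrative assumptions, not part of this change.

package net.helenus.test.integration.core.unitofwork;

import static net.helenus.core.Query.eq;

import java.util.UUID;

import org.junit.Assert;
import org.junit.Test;

import com.datastax.driver.core.utils.UUIDs;

import net.bytebuddy.utility.RandomString;
import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

// Hypothetical example (not part of this commit): reads issued against a UnitOfWork are cached
// within it, a nested unit of work sees its parent's cache, and commit().andThen(...) runs only
// after a successful commit.
public class UnitOfWorkUsageSketch extends AbstractEmbeddedCassandraTest {

  @Test
  public void usage() throws Exception {
    HelenusSession session =
        Helenus.init(getSession()).showCql().add(Widget.class).autoCreateDrop().get();
    Widget widget = session.dsl(Widget.class);
    UUID key = UUIDs.timeBased();

    try (UnitOfWork outer = session.begin()) {
      outer.setPurpose("usage-sketch");

      // Insert inside the outer unit of work; reads passing the same (or a nested) uow can see it.
      session.<Widget>insert(widget)
          .value(widget::id, key)
          .value(widget::name, RandomString.make(20))
          .sync(outer);

      try (UnitOfWork inner = session.begin(outer)) {
        // The first read resolves through the parent's cache or the database; the second read
        // within the same unit of work returns the cached instance.
        Widget first =
            session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(inner).orElse(null);
        Widget second =
            session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(inner).orElse(null);

        inner.commit().andThen(() -> {
          Assert.assertEquals(first, second);
        });
      }

      outer.commit().andThen(() -> {
        // Runs once the outer unit of work has committed.
      });
    }
  }
}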
@@ -16,6 +16,7 @@
|
|||
package net.helenus.test.integration.core.usertype;
|
||||
|
||||
import com.datastax.driver.core.UDTValue;
|
||||
|
||||
import net.helenus.mapping.annotation.Column;
|
||||
import net.helenus.mapping.annotation.PartitionKey;
|
||||
import net.helenus.mapping.annotation.Table;
|
||||
|
@@ -24,13 +25,13 @@ import net.helenus.mapping.annotation.Types;
|
|||
@Table
|
||||
public interface Account {
|
||||
|
||||
@PartitionKey(ordinal = 0)
|
||||
long id();
|
||||
@PartitionKey(ordinal = 0)
|
||||
long id();
|
||||
|
||||
@Column
|
||||
Address address();
|
||||
@Column
|
||||
Address address();
|
||||
|
||||
@Types.UDT("address")
|
||||
@Column
|
||||
UDTValue addressNoMapping();
|
||||
@Types.UDT("address")
|
||||
@Column
|
||||
UDTValue addressNoMapping();
|
||||
}
|
||||
|
|
|
@@ -15,8 +15,10 @@
|
|||
*/
|
||||
package net.helenus.test.integration.core.usertype;
|
||||
|
||||
import com.datastax.driver.core.DataType;
|
||||
import java.util.Set;
|
||||
|
||||
import com.datastax.driver.core.DataType;
|
||||
|
||||
import net.helenus.mapping.annotation.Column;
|
||||
import net.helenus.mapping.annotation.Types;
|
||||
import net.helenus.mapping.annotation.UDT;
|
||||
|
@@ -24,19 +26,19 @@ import net.helenus.mapping.annotation.UDT;
|
|||
@UDT("address")
|
||||
public interface Address {
|
||||
|
||||
@Column(ordinal = 0, value = "line_1")
|
||||
String street();
|
||||
@Column(ordinal = 0, value = "line_1")
|
||||
String street();
|
||||
|
||||
@Column
|
||||
String city();
|
||||
@Column
|
||||
String city();
|
||||
|
||||
@Column
|
||||
int zip();
|
||||
@Column
|
||||
int zip();
|
||||
|
||||
@Column
|
||||
String country();
|
||||
@Column
|
||||
String country();
|
||||
|
||||
@Column
|
||||
@Types.Set(DataType.Name.TEXT)
|
||||
Set<String> phones();
|
||||
@Column
|
||||
@Types.Set(DataType.Name.TEXT)
|
||||
Set<String> phones();
|
||||
}
|
||||
|
|
|
@@ -6,6 +6,6 @@ import net.helenus.mapping.annotation.UDT;
|
|||
@UDT
|
||||
public interface AddressInformation {
|
||||
|
||||
@Column
|
||||
Address address();
|
||||
@Column
|
||||
Address address();
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,7 @@
|
|||
package net.helenus.test.integration.core.usertype;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import net.helenus.mapping.annotation.Column;
|
||||
import net.helenus.mapping.annotation.PartitionKey;
|
||||
import net.helenus.mapping.annotation.Table;
|
||||
|
@@ -8,9 +9,9 @@ import net.helenus.mapping.annotation.Table;
|
|||
@Table
|
||||
public interface Customer {
|
||||
|
||||
@PartitionKey
|
||||
UUID id();
|
||||
@PartitionKey
|
||||
UUID id();
|
||||
|
||||
@Column
|
||||
AddressInformation addressInformation();
|
||||
@Column
|
||||
AddressInformation addressInformation();
|
||||
}
|
||||
|
|
|
@@ -15,118 +15,111 @@
|
|||
*/
|
||||
package net.helenus.test.integration.core.usertype;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.core.Query;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.core.Query;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
|
||||
public class InnerUserDefinedTypeTest extends AbstractEmbeddedCassandraTest {
|
||||
|
||||
static Customer customer;
|
||||
static AddressInformation addressInformation;
|
||||
static Customer customer;
|
||||
static AddressInformation addressInformation;
|
||||
|
||||
static HelenusSession session;
|
||||
static HelenusSession session;
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
Helenus.clearDslCache();
|
||||
session = Helenus.init(getSession()).showCql().add(Customer.class).autoCreateDrop().get();
|
||||
customer = Helenus.dsl(Customer.class);
|
||||
addressInformation = Helenus.dsl(AddressInformation.class);
|
||||
}
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
Helenus.clearDslCache();
|
||||
session = Helenus.init(getSession()).showCql().add(Customer.class).autoCreateDrop().get();
|
||||
customer = Helenus.dsl(Customer.class);
|
||||
addressInformation = Helenus.dsl(AddressInformation.class);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterTest() {
|
||||
session.getSession().execute("DROP TABLE IF EXISTS customer;");
|
||||
session.getSession().execute("DROP TYPE IF EXISTS address_information;");
|
||||
// SchemaUtil.dropUserType(session.getSessionRepository().findUserType("address_information")), true);
|
||||
}
|
||||
@AfterClass
|
||||
public static void afterTest() {
|
||||
session.getSession().execute("DROP TABLE IF EXISTS customer;");
|
||||
session.getSession().execute("DROP TYPE IF EXISTS address_information;");
|
||||
// SchemaUtil.dropUserType(session.getSessionRepository().findUserType("address_information")),
|
||||
// true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPrint() {
|
||||
System.out.println(addressInformation);
|
||||
System.out.println(customer);
|
||||
}
|
||||
@Test
|
||||
public void testPrint() {
|
||||
System.out.println(addressInformation);
|
||||
System.out.println(customer);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCrud() throws TimeoutException {
|
||||
@Test
|
||||
public void testCrud() throws TimeoutException {
|
||||
|
||||
UUID id = UUID.randomUUID();
|
||||
UUID id = UUID.randomUUID();
|
||||
|
||||
Address a =
|
||||
new Address() {
|
||||
Address a = new Address() {
|
||||
|
||||
@Override
|
||||
public String street() {
|
||||
return "1 st";
|
||||
}
|
||||
@Override
|
||||
public String street() {
|
||||
return "1 st";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String city() {
|
||||
return "San Jose";
|
||||
}
|
||||
@Override
|
||||
public String city() {
|
||||
return "San Jose";
|
||||
}
|
||||
|
||||
@Override
|
||||
public int zip() {
|
||||
return 95131;
|
||||
}
|
||||
@Override
|
||||
public int zip() {
|
||||
return 95131;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String country() {
|
||||
return "USA";
|
||||
}
|
||||
@Override
|
||||
public String country() {
|
||||
return "USA";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> phones() {
|
||||
return Sets.newHashSet("14080000000");
|
||||
}
|
||||
};
|
||||
@Override
|
||||
public Set<String> phones() {
|
||||
return Sets.newHashSet("14080000000");
|
||||
}
|
||||
};
|
||||
|
||||
AddressInformation ai =
|
||||
new AddressInformation() {
|
||||
AddressInformation ai = new AddressInformation() {
|
||||
|
||||
@Override
|
||||
public Address address() {
|
||||
return a;
|
||||
}
|
||||
};
|
||||
@Override
|
||||
public Address address() {
|
||||
return a;
|
||||
}
|
||||
};
|
||||
|
||||
session.insert().value(customer::id, id).value(customer::addressInformation, ai).sync();
|
||||
session.insert().value(customer::id, id).value(customer::addressInformation, ai).sync();
|
||||
|
||||
String cql =
|
||||
session
|
||||
.update()
|
||||
.set(customer.addressInformation().address()::street, "3 st")
|
||||
.where(customer::id, Query.eq(id))
|
||||
.cql();
|
||||
String cql = session.update().set(customer.addressInformation().address()::street, "3 st")
|
||||
.where(customer::id, Query.eq(id)).cql();
|
||||
|
||||
//TODO: System.out.println("At the time when this test was written Cassandra did not support queries like this: " + cql);
|
||||
// TODO: System.out.println("At the time when this test was written Cassandra
|
||||
// did not support queries like this: " + cql);
|
||||
|
||||
session.update().set(customer::addressInformation, ai).where(customer::id, Query.eq(id)).sync();
|
||||
session.update().set(customer::addressInformation, ai).where(customer::id, Query.eq(id)).sync();
|
||||
|
||||
String street =
|
||||
session
|
||||
.select(customer.addressInformation().address()::street)
|
||||
.where(customer::id, Query.eq(id))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
String street = session.select(customer.addressInformation().address()::street)
|
||||
.where(customer::id, Query.eq(id)).sync().findFirst().get()._1;
|
||||
|
||||
Assert.assertEquals("1 st", street);
|
||||
Assert.assertEquals("1 st", street);
|
||||
|
||||
session.delete().where(customer::id, Query.eq(id)).sync();
|
||||
session.delete().where(customer::id, Query.eq(id)).sync();
|
||||
|
||||
Long cnt = session.count().where(customer::id, Query.eq(id)).sync();
|
||||
Long cnt = session.count().where(customer::id, Query.eq(id)).sync();
|
||||
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
}
|
||||
|
|
|
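One detail in the test above worth pulling out: a column inside a nested UDT is addressed by chaining the DSL getters and ending with a method reference. A hypothetical, condensed sketch follows; the class name and the id parameter are illustrative, and it assumes a Customer row was inserted as in testCrud.

package net.helenus.test.integration.core.usertype;

import java.util.UUID;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class NestedUdtSelectSketch extends AbstractEmbeddedCassandraTest {

  public void selectNestedField(UUID id) throws Exception {
    HelenusSession session =
        Helenus.init(getSession()).showCql().add(Customer.class).autoCreateDrop().get();
    Customer customer = Helenus.dsl(Customer.class);

    // customer.addressInformation().address()::street walks Customer -> AddressInformation ->
    // Address and selects the street column of the innermost UDT.
    String street = session
        .select(customer.addressInformation().address()::street)
        .where(customer::id, Query.eq(id))
        .sync()
        .findFirst()
        .get()
        ._1;
  }
}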
@@ -15,242 +15,204 @@
|
|||
*/
|
||||
package net.helenus.test.integration.core.usertype;
|
||||
|
||||
import com.datastax.driver.core.UDTValue;
|
||||
import com.datastax.driver.core.UserType;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.core.Query;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.datastax.driver.core.UDTValue;
|
||||
import com.datastax.driver.core.UserType;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.core.Query;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
|
||||
public class UserDefinedTypeTest extends AbstractEmbeddedCassandraTest {
|
||||
|
||||
static Address address;
|
||||
static Account account;
|
||||
static Address address;
|
||||
static Account account;
|
||||
|
||||
static HelenusSession session;
|
||||
static HelenusSession session;
|
||||
|
||||
public static class AccountImpl implements Account {
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session = Helenus.init(getSession()).showCql().add(Account.class).autoCreateDrop().get();
|
||||
address = Helenus.dsl(Address.class);
|
||||
account = Helenus.dsl(Account.class);
|
||||
}
|
||||
|
||||
long id;
|
||||
Address address;
|
||||
UDTValue addressNoMapping;
|
||||
@Test
|
||||
public void testPrint() {
|
||||
System.out.println(address);
|
||||
System.out.println(account);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long id() {
|
||||
return id;
|
||||
}
|
||||
@Test
|
||||
public void testMappingCRUID() throws TimeoutException {
|
||||
|
||||
@Override
|
||||
public Address address() {
|
||||
return address;
|
||||
}
|
||||
AddressImpl addr = new AddressImpl();
|
||||
addr.street = "1 st";
|
||||
addr.city = "San Jose";
|
||||
|
||||
@Override
|
||||
public UDTValue addressNoMapping() {
|
||||
return addressNoMapping;
|
||||
}
|
||||
}
|
||||
AccountImpl acc = new AccountImpl();
|
||||
acc.id = 123L;
|
||||
acc.address = addr;
|
||||
|
||||
public static class AddressImpl implements Address {
|
||||
// CREATE
|
||||
|
||||
String street;
|
||||
String city;
|
||||
int zip;
|
||||
String country;
|
||||
Set<String> phones;
|
||||
session.upsert(acc).sync();
|
||||
|
||||
@Override
|
||||
public String street() {
|
||||
return street;
|
||||
}
|
||||
// READ
|
||||
|
||||
@Override
|
||||
public String city() {
|
||||
return city;
|
||||
}
|
||||
String streetName = session.select(account.address()::street).where(account::id, Query.eq(123L)).sync()
|
||||
.findFirst().get()._1;
|
||||
|
||||
@Override
|
||||
public int zip() {
|
||||
return zip;
|
||||
}
|
||||
Assert.assertEquals("1 st", streetName);
|
||||
|
||||
@Override
|
||||
public String country() {
|
||||
return country;
|
||||
}
|
||||
// UPDATE
|
||||
|
||||
@Override
|
||||
public Set<String> phones() {
|
||||
return phones;
|
||||
}
|
||||
}
|
||||
AddressImpl expected = new AddressImpl();
|
||||
expected.street = "2 st";
|
||||
expected.city = "San Francisco";
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session = Helenus.init(getSession()).showCql().add(Account.class).autoCreateDrop().get();
|
||||
address = Helenus.dsl(Address.class);
|
||||
account = Helenus.dsl(Account.class);
|
||||
}
|
||||
session.update().set(account::address, expected).where(account::id, Query.eq(123L)).sync();
|
||||
|
||||
@Test
|
||||
public void testPrint() {
|
||||
System.out.println(address);
|
||||
System.out.println(account);
|
||||
}
|
||||
Address actual = session.select(account::address).where(account::id, Query.eq(123L)).sync().findFirst()
|
||||
.get()._1;
|
||||
|
||||
@Test
|
||||
public void testMappingCRUID() throws TimeoutException {
|
||||
Assert.assertEquals(expected.street(), actual.street());
|
||||
Assert.assertEquals(expected.city(), actual.city());
|
||||
Assert.assertNull(actual.country());
|
||||
Assert.assertEquals(0, actual.zip());
|
||||
|
||||
AddressImpl addr = new AddressImpl();
|
||||
addr.street = "1 st";
|
||||
addr.city = "San Jose";
|
||||
// INSERT using UPDATE
|
||||
session.update().set(account::address, null).where(account::id, Query.eq(123L)).sync();
|
||||
|
||||
AccountImpl acc = new AccountImpl();
|
||||
acc.id = 123L;
|
||||
acc.address = addr;
|
||||
Address adrNull = session.select(account::address).where(account::id, Query.eq(123L)).sync().findFirst()
|
||||
.get()._1;
|
||||
Assert.assertNull(adrNull);
|
||||
|
||||
// CREATE
|
||||
// DELETE
|
||||
|
||||
session.upsert(acc).sync();
|
||||
session.delete().where(account::id, Query.eq(123L)).sync();
|
||||
|
||||
// READ
|
||||
Long cnt = session.count().where(account::id, Query.eq(123L)).sync();
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
|
||||
String streetName =
|
||||
session
|
||||
.select(account.address()::street)
|
||||
.where(account::id, Query.eq(123L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
@Test
|
||||
public void testNoMapping() throws TimeoutException {
|
||||
|
||||
Assert.assertEquals("1 st", streetName);
|
||||
String ks = getSession().getLoggedKeyspace();
|
||||
UserType addressType = getSession().getCluster().getMetadata().getKeyspace(ks).getUserType("address");
|
||||
|
||||
// UPDATE
|
||||
UDTValue addressNoMapping = addressType.newValue();
|
||||
addressNoMapping.setString("line_1", "1st street");
|
||||
addressNoMapping.setString("city", "San Jose");
|
||||
|
||||
AddressImpl expected = new AddressImpl();
|
||||
expected.street = "2 st";
|
||||
expected.city = "San Francisco";
|
||||
AccountImpl acc = new AccountImpl();
|
||||
acc.id = 777L;
|
||||
acc.addressNoMapping = addressNoMapping;
|
||||
|
||||
session.update().set(account::address, expected).where(account::id, Query.eq(123L)).sync();
|
||||
// CREATE
|
||||
|
||||
Address actual =
|
||||
session
|
||||
.select(account::address)
|
||||
.where(account::id, Query.eq(123L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
session.upsert(acc).sync();
|
||||
|
||||
Assert.assertEquals(expected.street(), actual.street());
|
||||
Assert.assertEquals(expected.city(), actual.city());
|
||||
Assert.assertNull(actual.country());
|
||||
Assert.assertEquals(0, actual.zip());
|
||||
// READ
|
||||
|
||||
// INSERT using UPDATE
|
||||
session.update().set(account::address, null).where(account::id, Query.eq(123L)).sync();
|
||||
UDTValue found = session.select(account::addressNoMapping).where(account::id, Query.eq(777L)).sync().findFirst()
|
||||
.get()._1;
|
||||
|
||||
Address adrNull =
|
||||
session
|
||||
.select(account::address)
|
||||
.where(account::id, Query.eq(123L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
Assert.assertNull(adrNull);
|
||||
Assert.assertEquals(addressNoMapping.getType(), found.getType());
|
||||
Assert.assertEquals(addressNoMapping.getString("line_1"), found.getString("line_1"));
|
||||
Assert.assertEquals(addressNoMapping.getString("city"), found.getString("city"));
|
||||
|
||||
// DELETE
|
||||
// UPDATE
|
||||
|
||||
session.delete().where(account::id, Query.eq(123L)).sync();
|
||||
addressNoMapping = addressType.newValue();
|
||||
addressNoMapping.setString("line_1", "Market street");
|
||||
addressNoMapping.setString("city", "San Francisco");
|
||||
|
||||
Long cnt = session.count().where(account::id, Query.eq(123L)).sync();
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
session.update().set(account::addressNoMapping, addressNoMapping).where(account::id, Query.eq(777L)).sync();
|
||||
|
||||
@Test
|
||||
public void testNoMapping() throws TimeoutException {
|
||||
found = session.select(account::addressNoMapping).where(account::id, Query.eq(777L)).sync().findFirst()
|
||||
.get()._1;
|
||||
|
||||
String ks = getSession().getLoggedKeyspace();
|
||||
UserType addressType =
|
||||
getSession().getCluster().getMetadata().getKeyspace(ks).getUserType("address");
|
||||
Assert.assertEquals(addressNoMapping.getType(), found.getType());
|
||||
Assert.assertEquals(addressNoMapping.getString("line_1"), found.getString("line_1"));
|
||||
Assert.assertEquals(addressNoMapping.getString("city"), found.getString("city"));
|
||||
|
||||
UDTValue addressNoMapping = addressType.newValue();
|
||||
addressNoMapping.setString("line_1", "1st street");
|
||||
addressNoMapping.setString("city", "San Jose");
|
||||
// INSERT using UPDATE
|
||||
session.update().set(account::addressNoMapping, null).where(account::id, Query.eq(777L)).sync();
|
||||
|
||||
AccountImpl acc = new AccountImpl();
|
||||
acc.id = 777L;
|
||||
acc.addressNoMapping = addressNoMapping;
|
||||
found = session.select(account::addressNoMapping).where(account::id, Query.eq(777L)).sync().findFirst()
|
||||
.get()._1;
|
||||
Assert.assertNull(found);
|
||||
|
||||
// CREATE
|
||||
// DELETE
|
||||
|
||||
session.upsert(acc).sync();
|
||||
session.delete().where(account::id, Query.eq(777L)).sync();
|
||||
|
||||
// READ
|
||||
Long cnt = session.count().where(account::id, Query.eq(777L)).sync();
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
|
||||
UDTValue found =
|
||||
session
|
||||
.select(account::addressNoMapping)
|
||||
.where(account::id, Query.eq(777L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
public static class AccountImpl implements Account {
|
||||
|
||||
Assert.assertEquals(addressNoMapping.getType(), found.getType());
|
||||
Assert.assertEquals(addressNoMapping.getString("line_1"), found.getString("line_1"));
|
||||
Assert.assertEquals(addressNoMapping.getString("city"), found.getString("city"));
|
||||
long id;
|
||||
Address address;
|
||||
UDTValue addressNoMapping;
|
||||
|
||||
// UPDATE
|
||||
@Override
|
||||
public long id() {
|
||||
return id;
|
||||
}
|
||||
|
||||
addressNoMapping = addressType.newValue();
|
||||
addressNoMapping.setString("line_1", "Market street");
|
||||
addressNoMapping.setString("city", "San Francisco");
|
||||
@Override
|
||||
public Address address() {
|
||||
return address;
|
||||
}
|
||||
|
||||
session
|
||||
.update()
|
||||
.set(account::addressNoMapping, addressNoMapping)
|
||||
.where(account::id, Query.eq(777L))
|
||||
.sync();
|
||||
@Override
|
||||
public UDTValue addressNoMapping() {
|
||||
return addressNoMapping;
|
||||
}
|
||||
}
|
||||
|
||||
found =
|
||||
session
|
||||
.select(account::addressNoMapping)
|
||||
.where(account::id, Query.eq(777L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
public static class AddressImpl implements Address {
|
||||
|
||||
Assert.assertEquals(addressNoMapping.getType(), found.getType());
|
||||
Assert.assertEquals(addressNoMapping.getString("line_1"), found.getString("line_1"));
|
||||
Assert.assertEquals(addressNoMapping.getString("city"), found.getString("city"));
|
||||
String street;
|
||||
String city;
|
||||
int zip;
|
||||
String country;
|
||||
Set<String> phones;
|
||||
|
||||
// INSERT using UPDATE
|
||||
session.update().set(account::addressNoMapping, null).where(account::id, Query.eq(777L)).sync();
|
||||
@Override
|
||||
public String street() {
|
||||
return street;
|
||||
}
|
||||
|
||||
found =
|
||||
session
|
||||
.select(account::addressNoMapping)
|
||||
.where(account::id, Query.eq(777L))
|
||||
.sync()
|
||||
.findFirst()
|
||||
.get()
|
||||
._1;
|
||||
Assert.assertNull(found);
|
||||
@Override
|
||||
public String city() {
|
||||
return city;
|
||||
}
|
||||
|
||||
// DELETE
|
||||
@Override
|
||||
public int zip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
session.delete().where(account::id, Query.eq(777L)).sync();
|
||||
@Override
|
||||
public String country() {
|
||||
return country;
|
||||
}
|
||||
|
||||
Long cnt = session.count().where(account::id, Query.eq(777L)).sync();
|
||||
Assert.assertEquals(Long.valueOf(0), cnt);
|
||||
}
|
||||
@Override
|
||||
public Set<String> phones() {
|
||||
return phones;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
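Before the materialized-view changes below, a hypothetical condensation of the "no mapping" path in UserDefinedTypeTest: a raw UDTValue built from the driver's UserType metadata is stored in the @Types.UDT("address") column instead of a mapped Address. The class name and literal values are illustrative assumptions, not part of this commit.

package net.helenus.test.integration.core.usertype;

import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.core.Query;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class RawUdtValueSketch extends AbstractEmbeddedCassandraTest {

  public void storeRawUdtValue() throws Exception {
    HelenusSession session =
        Helenus.init(getSession()).showCql().add(Account.class).autoCreateDrop().get();
    Account account = Helenus.dsl(Account.class);

    // Look up the driver-level UserType for the "address" UDT in the logged keyspace.
    String ks = getSession().getLoggedKeyspace();
    UserType addressType =
        getSession().getCluster().getMetadata().getKeyspace(ks).getUserType("address");

    UDTValue value = addressType.newValue();
    value.setString("line_1", "1st street");
    value.setString("city", "San Jose");

    // UPDATE acts as an upsert here; the unmapped column takes the UDTValue as-is.
    session.update().set(account::addressNoMapping, value).where(account::id, Query.eq(777L)).sync();

    UDTValue found = session.select(account::addressNoMapping)
        .where(account::id, Query.eq(777L))
        .sync()
        .findFirst()
        .get()
        ._1;
  }
}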
@@ -2,28 +2,25 @@ package net.helenus.test.integration.core.views;
|
|||
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
|
||||
import net.helenus.mapping.annotation.ClusteringColumn;
|
||||
import net.helenus.mapping.annotation.CoveringIndex;
|
||||
import net.helenus.mapping.annotation.PartitionKey;
|
||||
import net.helenus.mapping.annotation.Table;
|
||||
|
||||
@Table
|
||||
@CoveringIndex(
|
||||
name = "cyclist_mv",
|
||||
covering = {"age", "birthday", "country"},
|
||||
partitionKeys = {"age", "cid"},
|
||||
clusteringColumns = {}
|
||||
)
|
||||
@CoveringIndex(name = "cyclist_mv", covering = {"age", "birthday", "country"}, partitionKeys = {"age",
|
||||
"cid"}, clusteringColumns = {})
|
||||
public interface Cyclist {
|
||||
@ClusteringColumn
|
||||
UUID cid();
|
||||
@ClusteringColumn
|
||||
UUID cid();
|
||||
|
||||
String name();
|
||||
String name();
|
||||
|
||||
@PartitionKey
|
||||
int age();
|
||||
@PartitionKey
|
||||
int age();
|
||||
|
||||
Date birthday();
|
||||
Date birthday();
|
||||
|
||||
String country();
|
||||
String country();
|
||||
}
|
||||
|
|
|
@@ -2,19 +2,23 @@ package net.helenus.test.integration.core.views;
|
|||
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
|
||||
import net.helenus.mapping.OrderingDirection;
|
||||
import net.helenus.mapping.annotation.*;
|
||||
import net.helenus.mapping.annotation.ClusteringColumn;
|
||||
import net.helenus.mapping.annotation.Index;
|
||||
import net.helenus.mapping.annotation.MaterializedView;
|
||||
import net.helenus.mapping.annotation.PartitionKey;
|
||||
|
||||
@MaterializedView
|
||||
public interface CyclistsByAge extends Cyclist {
|
||||
@PartitionKey
|
||||
UUID cid();
|
||||
@PartitionKey
|
||||
UUID cid();
|
||||
|
||||
@ClusteringColumn(ordering = OrderingDirection.ASC)
|
||||
int age();
|
||||
@ClusteringColumn(ordering = OrderingDirection.ASC)
|
||||
int age();
|
||||
|
||||
Date birthday();
|
||||
Date birthday();
|
||||
|
||||
@Index
|
||||
String country();
|
||||
@Index
|
||||
String country();
|
||||
}
|
||||
|
|
|
@@ -19,13 +19,16 @@ import static net.helenus.core.Query.eq;
|
|||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.HelenusSession;
|
||||
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
// See: https://docs.datastax.com/en/cql/3.3/cql/cql_using/useCreateMV.html
|
||||
// https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlCreateMaterializedView.html
|
||||
|
@@ -33,45 +36,35 @@
|
|||
// https://cassandra-zone.com/materialized-views/
|
||||
public class MaterializedViewTest extends AbstractEmbeddedCassandraTest {
|
||||
|
||||
static Cyclist cyclist;
|
||||
static HelenusSession session;
|
||||
static Cyclist cyclist;
|
||||
static HelenusSession session;
|
||||
|
||||
static Date dateFromString(String dateInString) {
|
||||
SimpleDateFormat formatter = new SimpleDateFormat("dd-MMM-yyyy");
|
||||
try {
|
||||
return formatter.parse(dateInString);
|
||||
} catch (ParseException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
static Date dateFromString(String dateInString) {
|
||||
SimpleDateFormat formatter = new SimpleDateFormat("dd-MMM-yyyy");
|
||||
try {
|
||||
return formatter.parse(dateInString);
|
||||
} catch (ParseException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session =
|
||||
Helenus.init(getSession())
|
||||
.showCql()
|
||||
.add(Cyclist.class)
|
||||
.add(CyclistsByAge.class)
|
||||
.autoCreateDrop()
|
||||
.get();
|
||||
cyclist = session.dsl(Cyclist.class);
|
||||
@BeforeClass
|
||||
public static void beforeTest() {
|
||||
session = Helenus.init(getSession()).showCql().add(Cyclist.class).add(CyclistsByAge.class).autoCreateDrop()
|
||||
.get();
|
||||
cyclist = session.dsl(Cyclist.class);
|
||||
|
||||
//try {
|
||||
session
|
||||
.insert(cyclist)
|
||||
.value(cyclist::cid, UUID.randomUUID())
|
||||
.value(cyclist::age, 18)
|
||||
.value(cyclist::birthday, dateFromString("1997-02-08"))
|
||||
.value(cyclist::country, "Netherlands")
|
||||
.value(cyclist::name, "Pascal EENKHOORN")
|
||||
.sync();
|
||||
//} catch (TimeoutException e) {
|
||||
//}
|
||||
}
|
||||
try {
|
||||
session.insert(cyclist).value(cyclist::cid, UUID.randomUUID()).value(cyclist::age, 18)
|
||||
.value(cyclist::birthday, dateFromString("1997-02-08")).value(cyclist::country, "Netherlands")
|
||||
.value(cyclist::name, "Pascal EENKHOORN").sync();
|
||||
} catch (TimeoutException e) {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMv() throws TimeoutException {
|
||||
session.select(Cyclist.class).from(CyclistsByAge.class).where(cyclist::age, eq(18)).sync();
|
||||
}
|
||||
@Test
|
||||
public void testMv() throws TimeoutException {
|
||||
session.select(Cyclist.class).from(CyclistsByAge.class).where(cyclist::age, eq(18)).sync();
|
||||
}
|
||||
}
|
||||
|
|
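Finally, a hypothetical sketch of the materialized-view read pattern MaterializedViewTest exercises: rows are inserted into the base Cyclist table and then read back through the CyclistsByAge view. The class name and values are illustrative assumptions, not part of this commit.

package net.helenus.test.integration.core.views;

import static net.helenus.core.Query.eq;

import java.util.UUID;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;

public class MaterializedViewUsageSketch extends AbstractEmbeddedCassandraTest {

  public void queryThroughView() throws Exception {
    HelenusSession session = Helenus.init(getSession()).showCql()
        .add(Cyclist.class).add(CyclistsByAge.class).autoCreateDrop().get();
    Cyclist cyclist = session.dsl(Cyclist.class);

    // Write to the base table; Cassandra keeps the CyclistsByAge view in sync.
    session.insert(cyclist)
        .value(cyclist::cid, UUID.randomUUID())
        .value(cyclist::age, 18)
        .value(cyclist::country, "Netherlands")
        .value(cyclist::name, "Pascal EENKHOORN")
        .sync();

    // Read through the view instead of the base table.
    session.select(Cyclist.class).from(CyclistsByAge.class).where(cyclist::age, eq(18)).sync();
  }
}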