Merge branch 'develop'

Greg Burd 2017-10-23 14:48:33 -04:00
commit dcc0927a4a
232 changed files with 11166 additions and 10234 deletions

NOTES

@@ -1,3 +1,8 @@
--- Cache
// `E` is the type of the Entity class or one of:
// - ResultSet
@@ -315,3 +320,55 @@ begin:
}
};
}
----------------------------------
if ("ttl".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
Getter getter = (Getter) args[0];
if (getter == null) {
return false;
}
HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
String getterName = prop.getPropertyName();
String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
if (src.containsKey(ttlKeyForProperty)) {
return src.get(ttlKeyForProperty);
} else {
return 0;
}
}
if ("written".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
Getter getter = (Getter) args[0];
if (getter == null) {
return false;
}
HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
String getterName = prop.getPropertyName();
String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
if (src.containsKey(ttlKeyForProperty)) {
return src.get(ttlKeyForProperty);
} else {
return 0;
}
}
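For context, a minimal sketch of how an interceptor like the one above is reached: the entity instance is a java.lang.reflect.Proxy over the value map `src`, so every method call lands in invoke(), where branches like the `ttl`/`written` ones run. The `Widget` interface and the trivial handler below are hypothetical stand-ins for illustration, not the actual Helenus wiring.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;

interface Widget { String name(); } // hypothetical mapped entity

class ProxySketch {
public static void main(String[] argv) {
Map<String, Object> src = new HashMap<>();
src.put("name", "gizmo");
src.put("name_ttl", 300); // per-column TTL metadata, keyed "<column>_ttl" as above

// Trivial handler: map the method name to the backing map entry.
InvocationHandler handler = (proxy, method, args) -> src.get(method.getName());
Widget w = (Widget) Proxy.newProxyInstance(Widget.class.getClassLoader(),
new Class<?>[]{Widget.class}, handler);
System.out.println(w.name()); // prints "gizmo"
}
}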
-----------------
/*else {
Cache<String, Object> cache = session.getSessionCache();
Map<String, Object> rowMap = this.cache.rowMap();
for (String rowKey : rowMap.keySet()) {
List<String> keys = flattenFacets(facets);
for (String key : keys) {
Object value = cache.getIfPresent(key);
if (value != null) {
result = Optional.of(value);
break;
}
}
}
cache.put
}
*/
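The session cache sketched in the commented block is a Guava Cache keyed by flattened facet strings. A self-contained example of the getIfPresent/put pattern it relies on; the size and expiry values here are illustrative only (the HelenusSession diff below builds its cache the same way):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

class SessionCacheSketch {
public static void main(String[] argv) {
Cache<String, Object> cache = CacheBuilder.newBuilder()
.maximumSize(10_000) // illustrative bound
.expireAfterAccess(60, TimeUnit.SECONDS) // illustrative expiry
.recordStats()
.build();
cache.put("widget.[name==gizmo]", new Object());
System.out.println(cache.getIfPresent("widget.[name==gizmo]") != null); // true: hit
System.out.println(cache.getIfPresent("widget.[name==nope]") == null); // true: miss returns null
}
}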


@@ -64,7 +64,6 @@ dependencies {
compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'
compile group: 'com.google.guava', name: 'guava', version: '20.0'
compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'


@@ -15,9 +15,10 @@
*/
package com.datastax.driver.core.querybuilder;
import com.datastax.driver.core.CodecRegistry;
import java.util.List;
import com.datastax.driver.core.CodecRegistry;
public class IsNotNullClause extends Clause {
final String name;


@@ -1,8 +1,6 @@
package com.datastax.driver.core.schemabuilder;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.STATEMENT_START;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotEmpty;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotKeyWord;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.*;
import com.google.common.base.Optional;
@@ -18,10 +16,8 @@ public class CreateCustomIndex extends CreateIndex {
CreateCustomIndex(String indexName) {
super(indexName);
validateNotEmpty(indexName, "Index name");
validateNotKeyWord(
indexName,
String.format(
"The index name '%s' is not allowed because it is a reserved keyword", indexName));
validateNotKeyWord(indexName,
String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
this.indexName = indexName;
}
@@ -38,22 +34,20 @@ public class CreateCustomIndex extends CreateIndex {
/**
* Specify the keyspace and table to create the index on.
*
* @param keyspaceName the keyspace name.
* @param tableName the table name.
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
* @param keyspaceName
* the keyspace name.
* @param tableName
* the table name.
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
* of the column.
*/
public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
validateNotEmpty(keyspaceName, "Keyspace name");
validateNotEmpty(tableName, "Table name");
validateNotKeyWord(
keyspaceName,
String.format(
"The keyspace name '%s' is not allowed because it is a reserved keyword",
keyspaceName));
validateNotKeyWord(
tableName,
String.format(
"The table name '%s' is not allowed because it is a reserved keyword", tableName));
validateNotKeyWord(keyspaceName,
String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
validateNotKeyWord(tableName,
String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
this.keyspaceName = Optional.fromNullable(keyspaceName);
this.tableName = tableName;
return new CreateCustomIndex.CreateIndexOn();
@@ -62,54 +56,19 @@ public class CreateCustomIndex extends CreateIndex {
/**
* Specify the table to create the index on.
*
* @param tableName the table name.
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
* @param tableName
* the table name.
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
* of the column.
*/
public CreateIndex.CreateIndexOn onTable(String tableName) {
validateNotEmpty(tableName, "Table name");
validateNotKeyWord(
tableName,
String.format(
"The table name '%s' is not allowed because it is a reserved keyword", tableName));
validateNotKeyWord(tableName,
String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
this.tableName = tableName;
return new CreateCustomIndex.CreateIndexOn();
}
public class CreateIndexOn extends CreateIndex.CreateIndexOn {
/**
* Specify the column to create the index on.
*
* @param columnName the column name.
* @return the final CREATE INDEX statement.
*/
public SchemaStatement andColumn(String columnName) {
validateNotEmpty(columnName, "Column name");
validateNotKeyWord(
columnName,
String.format(
"The column name '%s' is not allowed because it is a reserved keyword", columnName));
CreateCustomIndex.this.columnName = columnName;
return SchemaStatement.fromQueryString(buildInternal());
}
/**
* Create an index on the keys of the given map column.
*
* @param columnName the column name.
* @return the final CREATE INDEX statement.
*/
public SchemaStatement andKeysOfColumn(String columnName) {
validateNotEmpty(columnName, "Column name");
validateNotKeyWord(
columnName,
String.format(
"The column name '%s' is not allowed because it is a reserved keyword", columnName));
CreateCustomIndex.this.columnName = columnName;
CreateCustomIndex.this.keys = true;
return SchemaStatement.fromQueryString(buildInternal());
}
}
String getCustomClassName() {
return "";
}
@@ -120,8 +79,7 @@ public class CreateCustomIndex extends CreateIndex {
@Override
public String buildInternal() {
StringBuilder createStatement =
new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
if (ifNotExists) {
createStatement.append("IF NOT EXISTS ");
@@ -154,4 +112,37 @@ public class CreateCustomIndex extends CreateIndex {
return createStatement.toString();
}
public class CreateIndexOn extends CreateIndex.CreateIndexOn {
/**
* Specify the column to create the index on.
*
* @param columnName
* the column name.
* @return the final CREATE INDEX statement.
*/
public SchemaStatement andColumn(String columnName) {
validateNotEmpty(columnName, "Column name");
validateNotKeyWord(columnName,
String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
CreateCustomIndex.this.columnName = columnName;
return SchemaStatement.fromQueryString(buildInternal());
}
/**
* Create an index on the keys of the given map column.
*
* @param columnName
* the column name.
* @return the final CREATE INDEX statement.
*/
public SchemaStatement andKeysOfColumn(String columnName) {
validateNotEmpty(columnName, "Column name");
validateNotKeyWord(columnName,
String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
CreateCustomIndex.this.columnName = columnName;
CreateCustomIndex.this.keys = true;
return SchemaStatement.fromQueryString(buildInternal());
}
}
}


@@ -10,8 +10,8 @@ public class CreateMaterializedView extends Create {
private String primaryKey;
private String clustering;
public CreateMaterializedView(
String keyspaceName, String viewName, Select.Where selection, String primaryKey, String clustering) {
public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
String clustering) {
super(keyspaceName, viewName);
this.viewName = viewName;
this.selection = selection;
@@ -24,8 +24,7 @@ public class CreateMaterializedView extends Create {
}
public String buildInternal() {
StringBuilder createStatement =
new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
if (ifNotExists) {
createStatement.append(" IF NOT EXISTS");
}


@@ -11,8 +11,7 @@ public class CreateSasiIndex extends CreateCustomIndex {
}
String getOptions() {
return "'analyzer_class': "
+ "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
+ "'case_sensitive': 'false'";
}
}
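A sketch of how these builders chain, assuming CreateSasiIndex exposes a public single-argument constructor (the index, keyspace, and table names are hypothetical): onTable() returns a CreateIndexOn whose andColumn() renders the final statement through buildInternal().

// Hypothetical usage; the generated CQL is roughly:
//   CREATE CUSTOM INDEX widget_name_idx ON ks.widget (name)
//   USING '<custom index class>' WITH OPTIONS = {
//     'analyzer_class': '...NonTokenizingAnalyzer', 'case_sensitive': 'false' };
SchemaStatement stmt = new CreateSasiIndex("widget_name_idx")
.onTable("ks", "widget")
.andColumn("name");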


@@ -4,18 +4,10 @@ import com.google.common.base.Optional;
public class DropMaterializedView extends Drop {
enum DroppedItem {
TABLE,
TYPE,
INDEX,
MATERIALIZED_VIEW
}
private final String itemType = "MATERIALIZED VIEW";
private Optional<String> keyspaceName = Optional.absent();
private String itemName;
private boolean ifExists = true;
private final String itemType = "MATERIALIZED VIEW";
public DropMaterializedView(String keyspaceName, String viewName) {
this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
}
@@ -50,4 +42,8 @@ public class DropMaterializedView extends Drop {
dropStatement.append(itemName);
return dropStatement.toString();
}
enum DroppedItem {
TABLE, TYPE, INDEX, MATERIALIZED_VIEW
}
}


@@ -17,6 +17,7 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
import net.helenus.core.reflect.ReflectionDslInstantiator;


@@ -18,6 +18,7 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.function.Function;
import net.helenus.mapping.annotation.Transient;
public enum GetterMethodDetector implements Function<Method, Boolean> {


@@ -17,6 +17,7 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;


@@ -3,6 +3,7 @@ package net.helenus.core;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import net.helenus.core.reflect.MapExportable;
public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {


@@ -1,7 +1,12 @@
package net.helenus.core;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import com.google.common.primitives.Primitives;
import java.util.*;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.MapExportable;


@@ -15,22 +15,28 @@
*/
package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.Executor;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.collect.Table;
import com.google.common.util.concurrent.ListenableFuture;
import brave.Tracer;
import net.helenus.core.cache.Facet;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
public abstract class AbstractSessionOperations {
final Logger logger = LoggerFactory.getLogger(getClass());
private static final Logger LOG = LoggerFactory.getLogger(AbstractSessionOperations.class);
public abstract Session currentSession();
@@ -84,8 +90,8 @@ public abstract class AbstractSessionOperations {
}
void log(Statement statement, boolean showValues) {
if (logger.isInfoEnabled()) {
logger.info("Execute statement " + statement);
if (LOG.isInfoEnabled()) {
LOG.info("Execute statement " + statement);
}
if (isShowCql()) {
if (statement instanceof BuiltStatement) {
@@ -113,6 +119,9 @@
return null;
}
public void mergeCache(Table<String, String, Object> cache) {
}
RuntimeException translateException(RuntimeException e) {
if (e instanceof HelenusException) {
return e;
@@ -120,6 +129,13 @@
throw new HelenusException(e);
}
public Object checkCache(String tableName, List<Facet> facets) {
return null;
}
public void updateCache(Object pojo, List<Facet> facets) {
}
void printCql(String cql) {
getPrintStream().println(cql);
}


@@ -15,19 +15,38 @@
*/
package net.helenus.core;
import com.diffplug.common.base.Errors;
import com.google.common.collect.TreeTraverser;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork, AutoCloseable {
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);
private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
private final HelenusSession session;
private final AbstractUnitOfWork<E> parent;
// Cache:
private final Table<String, String, Object> cache = HashBasedTable.create();
private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
private final Map<String, Set<Object>> cache = new HashMap<String, Set<Object>>();
private boolean aborted = false;
private boolean committed = false;
private String purpose_;
private Stopwatch elapsedTime_;
private Stopwatch databaseTime_ = Stopwatch.createUnstarted();
private Stopwatch cacheLookupTime_ = Stopwatch.createUnstarted();
protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
Objects.requireNonNull(session, "containing session cannot be null");
@@ -36,40 +55,91 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
this.parent = parent;
}
public UnitOfWork addNestedUnitOfWork(UnitOfWork uow) {
@Override
public Stopwatch getExecutionTimer() {
return databaseTime_;
}
@Override
public Stopwatch getCacheLookupTimer() {
return cacheLookupTime_;
}
@Override
public void addNestedUnitOfWork(UnitOfWork<E> uow) {
synchronized (nested) {
nested.add((AbstractUnitOfWork<E>) uow);
}
}
@Override
public UnitOfWork<E> begin() {
elapsedTime_ = Stopwatch.createStarted();
// log.record(txn::start)
return this;
}
public UnitOfWork begin() {
// log.record(txn::start)
@Override
public UnitOfWork setPurpose(String purpose) {
purpose_ = purpose;
return this;
}
public void logTimers(String what) {
double e = (double) elapsedTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double d = (double) databaseTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double c = (double) cacheLookupTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double fd = (d / (e - c)) * 100.0;
double fc = (c / (e - d)) * 100.0;
LOG.info(String.format("UOW(%s)%s %s (total: %.3fms cache: %.3fms %2.2f%% db: %.3fms %2.2f%%)", hashCode(),
(purpose_ == null ? "" : " " + purpose_), what, e, c, fc, d, fd));
}
private void applyPostCommitFunctions() {
if (!postCommit.isEmpty()) {
for (CommitThunk f : postCommit) {
f.apply();
}
}
logTimers("committed");
}
public Set<Object> cacheLookup(String key) {
Set<Object> r = getCache().get(key);
if (r != null) {
return r;
@Override
public Optional<Object> cacheLookup(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
Optional<Object> result = Optional.empty();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Object value = cache.get(tableName, columnName);
if (value != null) {
if (result.isPresent() && result.get() != value) {
// One facet matched, but another did not.
result = Optional.empty();
break;
} else {
result = Optional.of(value);
}
}
}
}
if (!result.isPresent()) {
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
if (parent != null) {
return parent.cacheLookup(key);
return parent.cacheLookup(facets);
}
}
return null;
return result;
}
public Map<String, Set<Object>> getCache() {
return cache;
@Override
public void cacheUpdate(Object value, List<Facet> facets) {
Facet table = facets.remove(0);
String tableName = table.value().toString();
for (Facet facet : facets) {
String columnName = facet.name() + "==" + facet.value();
cache.put(tableName, columnName, value);
}
}
private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
@@ -77,16 +147,19 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
}
/**
* Checks to see if the work performed between calling begin and now can be committed or not.
* Checks to see if the work performed between calling begin and now can be
* committed or not.
*
* @return a function from which to chain work that only happens when commit is successful
* @throws E when the work overlaps with other concurrent writers.
* @return a function from which to chain work that only happens when commit is
* successful
* @throws E
* when the work overlaps with other concurrent writers.
*/
public PostCommitFunction<Void, Void> commit() throws E {
// All nested UnitOfWork should be committed (not aborted) before calls to commit, check.
// All nested UnitOfWork should be committed (not aborted) before calls to
// commit, check.
boolean canCommit = true;
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
if (this != uow) {
canCommit &= (!uow.aborted && uow.committed);
@@ -94,40 +167,29 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
}
// log.record(txn::provisionalCommit)
// examine log for conflicts in read-set and write-set between begin and provisional commit
// examine log for conflicts in read-set and write-set between begin and
// provisional commit
// if (conflict) { throw new ConflictingUnitOfWorkException(this) }
// else return function so as to enable commit.andThen(() -> { do something iff commit was successful; })
// else return function so as to enable commit.andThen(() -> { do something iff
// commit was successful; })
if (canCommit) {
committed = true;
aborted = false;
// TODO(gburd): union this cache with parent's (if there is a parent) or with the session cache for all cacheable entities we currently hold
nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
// Merge UOW cache into parent's cache.
if (parent != null) {
Map<String, Set<Object>> parentCache = parent.getCache();
for (String key : cache.keySet()) {
if (parentCache.containsKey(key)) {
// merge the sets
Set<Object> ps = parentCache.get(key);
ps.addAll(
cache.get(key)); //TODO(gburd): review this, likely not correct in all cases as-is.
parent.mergeCache(cache);
} else {
// add the missing set
parentCache.put(key, cache.get(key));
}
}
session.mergeCache(cache);
}
elapsedTime_.stop();
// Apply all post-commit functions for
if (parent == null) {
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
traverser.postOrderTraversal(this).forEach(uow -> {
uow.applyPostCommitFunctions();
});
return new PostCommitFunction(this, null);
@@ -142,17 +204,30 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
/* Explicitly discard the work and mark it as such in the log. */
public void abort() {
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
traverser.postOrderTraversal(this).forEach(uow -> {
uow.committed = false;
uow.aborted = true;
});
// log.record(txn::abort)
// cache.invalidateSince(txn::start time)
if (!hasAborted()) {
elapsedTime_.stop();
logTimers("aborted");
}
}
private void mergeCache(Table<String, String, Object> from) {
Table<String, String, Object> to = this.cache;
from.rowMap().forEach((rowKey, columnMap) -> {
columnMap.forEach((columnKey, value) -> {
if (to.contains(rowKey, columnKey)) {
to.put(rowKey, columnKey, CacheUtil.merge(to.get(rowKey, columnKey), from.get(rowKey, columnKey)));
} else {
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
}
});
});
}
public String describeConflicts() {
@@ -161,7 +236,8 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
@Override
public void close() throws E {
// Closing an AbstractUnitOfWork will abort iff we've not already aborted or committed this unit of work.
// Closing an AbstractUnitOfWork will abort iff we've not already aborted or
// committed this unit of work.
if (aborted == false && committed == false) {
abort();
}
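Pulling the lifecycle above together, a usage sketch (the no-argument session.begin() and the staged work are assumed for illustration, not taken from this diff): because close() aborts anything that was neither committed nor aborted, try-with-resources gives rollback-by-default.

// Sketch only; `session` is a HelenusSession.
try (UnitOfWork uow = session.begin()) {
// ... stage reads and writes through `uow`; repeated reads are served from its facet cache ...
uow.commit().andThen(() -> {
// runs only if the commit ultimately succeeds (see the commit.andThen comment above)
System.out.println("committed");
});
} // if commit() was never reached, close() aborts the unit of work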


@@ -16,8 +16,5 @@
package net.helenus.core;
public enum AutoDdl {
VALIDATE,
UPDATE,
CREATE,
CREATE_DROP;
VALIDATE, UPDATE, CREATE, CREATE_DROP;
}


@@ -1,6 +1,5 @@
package net.helenus.core;
@FunctionalInterface
public interface CommitThunk {
void apply();


@@ -15,15 +15,13 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Metadata;
import java.util.Optional;
import com.datastax.driver.core.Metadata;
import net.helenus.core.reflect.HelenusPropertyNode;
public interface DslInstantiator {
<E> E instantiate(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
Metadata metadata);
<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
}


@ -15,8 +15,10 @@
*/
package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.Clause;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.ColumnValuePreparer;
@@ -31,14 +33,6 @@ public final class Filter<V> {
this.postulate = postulate;
}
public HelenusPropertyNode getNode() {
return node;
}
public Clause getClause(ColumnValuePreparer valuePreparer) {
return postulate.getClause(node, valuePreparer);
}
public static <V> Filter<V> equal(Getter<V> getter, V val) {
return create(getter, Operator.EQ, val);
}
@@ -93,8 +87,7 @@ public final class Filter<V> {
Objects.requireNonNull(val, "empty value");
if (op == Operator.IN) {
throw new IllegalArgumentException(
"invalid usage of the 'in' operator, use Filter.in() static method");
throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
}
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
@@ -104,6 +97,18 @@
return new Filter<V>(node, postulate);
}
public HelenusPropertyNode getNode() {
return node;
}
public Clause getClause(ColumnValuePreparer valuePreparer) {
return postulate.getClause(node, valuePreparer);
}
public V[] postulateValues() {
return postulate.values();
}
@Override
public String toString() {
return node.getColumnName() + postulate.toString();
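A short sketch of the factories above in use (the dsl instance and values are hypothetical): each factory resolves the Getter to a HelenusPropertyNode and pairs it with a Postulate, which later renders a driver Clause via getClause(). Filter.in is varargs here by assumption, inferred from the error message above.

// Sketch; `widget` is a hypothetical Helenus dsl instance.
Filter<String> byName = Filter.equal(widget::name, "gizmo");
Filter<Integer> bySerial = Filter.in(widget::serial, 1, 2, 3);
// Operator.IN is only reachable through Filter.in(); passing it to
// Filter.create(getter, Operator.IN, value) throws IllegalArgumentException, as above.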


@@ -15,12 +15,17 @@
*/
package net.helenus.core;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import net.helenus.config.DefaultHelenusSettings;
import net.helenus.config.HelenusSettings;
import net.helenus.core.reflect.DslExportable;
@@ -30,15 +35,14 @@ import net.helenus.support.HelenusMappingException;
public final class Helenus {
private static volatile HelenusSettings settings = new DefaultHelenusSettings();
private static final ConcurrentMap<Class<?>, Object> dslCache =
new ConcurrentHashMap<Class<?>, Object>();
private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
new ConcurrentHashMap<Class<?>, Metadata>();
private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
private static volatile HelenusSettings settings = new DefaultHelenusSettings();
private static volatile HelenusSession singleton;
private Helenus() {}
private Helenus() {
}
protected static void setSession(HelenusSession session) {
sessions.add(session);
@@ -50,8 +54,7 @@ public final class Helenus {
}
public static void shutdown() {
sessions.forEach(
(session) -> {
sessions.forEach((session) -> {
session.close();
sessions.remove(session);
});
@@ -103,10 +106,7 @@
return dsl(iface, classLoader, Optional.empty(), metadata);
}
public static <E> E dsl(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
Metadata metadata) {
Object instance = null;
@@ -184,7 +184,9 @@
throw new HelenusMappingException("class is not an interface " + iface);
}
if (metadata != null) {
metadataForEntity.putIfAbsent(iface, metadata);
}
return entity(iface, metadata);
}


@@ -17,22 +17,30 @@ package net.helenus.core;
import static net.helenus.core.Query.eq;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import java.io.Closeable;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Table;
import brave.Tracer;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.operation.*;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.*;
@@ -51,8 +59,6 @@ public final class HelenusSession extends AbstractSessionOperations implements C
private final Session session;
private final CodecRegistry registry;
private volatile String usingKeyspace;
private volatile boolean showCql;
private final ConsistencyLevel defaultConsistencyLevel;
private final boolean defaultQueryIdempotency;
private final MetricRegistry metricRegistry;
@@ -62,30 +68,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C
private final SessionRepository sessionRepository;
private final Executor executor;
private final boolean dropSchemaOnClose;
private final Cache sessionCache;
private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer;
private final Metadata metadata;
private volatile String usingKeyspace;
private volatile boolean showCql;
HelenusSession(
Session session,
String usingKeyspace,
CodecRegistry registry,
boolean showCql,
PrintStream printStream,
SessionRepositoryBuilder sessionRepositoryBuilder,
Executor executor,
boolean dropSchemaOnClose,
ConsistencyLevel consistencyLevel,
boolean defaultQueryIdempotency,
Class<? extends UnitOfWork> unitOfWorkClass,
MetricRegistry metricRegistry,
Tracer tracer) {
HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, boolean defaultQueryIdempotency,
Class<? extends UnitOfWork> unitOfWorkClass, MetricRegistry metricRegistry, Tracer tracer) {
this.session = session;
this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
this.usingKeyspace =
Objects.requireNonNull(
usingKeyspace, "keyspace needs to be selected before creating session");
this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
"keyspace needs to be selected before creating session");
this.showCql = showCql;
this.printStream = printStream;
this.sessionRepository = sessionRepositoryBuilder.build();
@@ -97,6 +94,9 @@ public final class HelenusSession extends AbstractSessionOperations implements C
this.metricRegistry = metricRegistry;
this.zipkinTracer = tracer;
this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
this.metadata = session.getCluster().getMetadata();
@@ -178,6 +178,112 @@ public final class HelenusSession extends AbstractSessionOperations implements C
return defaultQueryIdempotency;
}
@Override
public Object checkCache(String tableName, List<Facet> facets) {
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
Object result = null;
for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination);
result = sessionCache.getIfPresent(cacheKey);
if (result != null) {
return result;
}
}
return null;
}
@Override
public void updateCache(Object pojo, List<Facet> facets) {
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
List<Facet> boundFacets = new ArrayList<>();
for (Facet facet : facets) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
if (valueMap == null) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
binder.setValueForProperty(prop, value.toString());
} else {
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
}
});
if (binder.isBound()) {
boundFacets.add(binder.bind());
}
} else {
boundFacets.add(facet);
}
}
String tableName = CacheUtil.schemaName(facets);
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination);
Object value = sessionCache.getIfPresent(cacheKey); // probe by key; probing with the pojo itself would always miss
if (value == null) {
sessionCache.put(cacheKey, pojo);
} else {
Object mergedValue = CacheUtil.merge(value, pojo);
sessionCache.put(cacheKey, mergedValue); // cache key first, merged value second
}
}
}
@Override
public void mergeCache(Table<String, String, Object> uowCache) {
List<Object> pojos = uowCache.values().stream().distinct().collect(Collectors.toList());
for (Object pojo : pojos) {
HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
if (entity.isCacheable()) {
List<Facet> boundFacets = new ArrayList<>();
for (Facet facet : entity.getFacets()) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
if (valueMap == null) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
binder.setValueForProperty(prop, value.toString());
} else {
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
}
});
if (binder.isBound()) {
boundFacets.add(binder.bind());
}
} else {
boundFacets.add(facet);
}
}
String tableName = entity.getName().toCql();
// NOTE: should equal `String tableName = CacheUtil.schemaName(facets);`
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination);
Object value = sessionCache.getIfPresent(cacheKey); // probe by key; probing with the pojo itself would always miss
if (value == null) {
sessionCache.put(cacheKey, pojo);
} else {
Object mergedValue = CacheUtil.merge(value, pojo);
sessionCache.put(cacheKey, mergedValue); // cache key first, merged value second
}
}
}
}
}
public Metadata getMetadata() {
return metadata;
}
@@ -189,35 +295,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
public synchronized UnitOfWork begin(UnitOfWork parent) {
try {
Class<? extends UnitOfWork> clazz = unitOfWorkClass;
Constructor<? extends UnitOfWork> ctor =
clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
UnitOfWork uow = ctor.newInstance(this, parent);
if (parent != null) {
parent.addNestedUnitOfWork(uow);
}
return uow.begin();
} catch (NoSuchMethodException
| InvocationTargetException
| InstantiationException
} catch (NoSuchMethodException | InvocationTargetException | InstantiationException
| IllegalAccessException e) {
throw new HelenusException(
String.format(
"Unable to instantiate {} as a UnitOfWork.", unitOfWorkClass.getSimpleName()),
e);
String.format("Unable to instantiate {} as a UnitOfWork.", unitOfWorkClass.getSimpleName()), e);
}
}
public <E> SelectOperation<E> select(E pojo) {
Objects.requireNonNull(
pojo, "supplied object must be a dsl for a registered entity but cannot be null");
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
ColumnValueProvider valueProvider = getValueProvider();
HelenusEntity entity = Helenus.resolve(pojo);
Class<?> entityClass = entity.getMappingInterface();
return new SelectOperation<E>(
this,
entity,
(r) -> {
return new SelectOperation<E>(this, entity, (r) -> {
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
return (E) Helenus.map(entityClass, map);
});
@@ -228,10 +325,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
ColumnValueProvider valueProvider = getValueProvider();
HelenusEntity entity = Helenus.entity(entityClass);
return new SelectOperation<E>(
this,
entity,
(r) -> {
return new SelectOperation<E>(this, entity, (r) -> {
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
return (E) Helenus.map(entityClass, map);
});
@@ -247,8 +341,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
}
public <E> SelectOperation<Row> selectAll(E pojo) {
Objects.requireNonNull(
pojo, "supplied object must be a dsl for a registered entity but cannot be null");
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
HelenusEntity entity = Helenus.resolve(pojo);
return new SelectOperation<Row>(this, entity);
}
@@ -263,8 +356,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
Objects.requireNonNull(getter1, "field 1 is empty");
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
return new SelectOperation<Tuple1<V1>>(
this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
}
public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {
@@ -273,12 +365,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
return new SelectOperation<Fun.Tuple2<V1, V2>>(
this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
p1, p2);
}
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
Getter<V3> getter3) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@@ -286,12 +378,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(
this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
}
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
Getter<V3> getter3, Getter<V4> getter4) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@@ -301,21 +393,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(
this,
new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4),
p1,
p2,
p3,
p4);
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
}
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
Getter<V1> getter1,
Getter<V2> getter2,
Getter<V3> getter3,
Getter<V4> getter4,
Getter<V5> getter5) {
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@@ -327,23 +410,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(
this,
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
p1,
p2,
p3,
p4,
p5);
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
}
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
Getter<V1> getter1,
Getter<V2> getter2,
Getter<V3> getter3,
Getter<V4> getter4,
Getter<V5> getter5,
Getter<V6> getter6) {
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1,
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@@ -357,26 +429,14 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(
this,
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
p1,
p2,
p3,
p4,
p5,
p6);
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4,
p5, p6);
}
public <V1, V2, V3, V4, V5, V6, V7>
SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
Getter<V1> getter1,
Getter<V2> getter2,
Getter<V3> getter3,
Getter<V4> getter4,
Getter<V5> getter5,
Getter<V6> getter6,
Getter<V7> getter7) {
public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5,
Getter<V6> getter6, Getter<V7> getter7) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@@ -392,17 +452,9 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(
this,
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
getValueProvider(), p1, p2, p3, p4, p5, p6, p7),
p1,
p2,
p3,
p4,
p5,
p6,
p7);
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2,
p3, p4, p5, p6, p7);
}
public CountOperation count() {
@@ -430,24 +482,19 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusEntity entity = Helenus.entity(draft.getEntityClass());
// Add all the mutated values contained in the draft.
entity
.getOrderedProperties()
.forEach(
property -> {
entity.getOrderedProperties().forEach(property -> {
switch (property.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
case PARTITION_KEY :
case CLUSTERING_COLUMN :
break;
default:
default :
String propertyName = property.getPropertyName();
if (mutatedProperties.contains(propertyName)) {
Object value = map.get(propertyName);
Getter<Object> getter =
new Getter<Object>() {
Getter<Object> getter = new Getter<Object>() {
@Override
public Object get() {
throw new DslPropertyException(
new HelenusPropertyNode(property, Optional.empty()));
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
}
};
update.set(getter, value);
@@ -455,22 +502,18 @@ public final class HelenusSession extends AbstractSessionOperations implements C
}
});
// Add the partition and clustering keys if they were in the draft (normally the case).
entity
.getOrderedProperties()
.forEach(
property -> {
// Add the partition and clustering keys if they were in the draft (normally the
// case).
entity.getOrderedProperties().forEach(property -> {
switch (property.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
case PARTITION_KEY :
case CLUSTERING_COLUMN :
String propertyName = property.getPropertyName();
Object value = map.get(propertyName);
Getter<Object> getter =
new Getter<Object>() {
Getter<Object> getter = new Getter<Object>() {
@Override
public Object get() {
throw new DslPropertyException(
new HelenusPropertyNode(property, Optional.empty()));
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
}
};
update.where(getter, eq(value));
@@ -498,8 +541,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
}
public <T> InsertOperation<T> insert(T pojo) {
Objects.requireNonNull(
pojo,
Objects.requireNonNull(pojo,
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null;
try {
@@ -539,8 +581,7 @@ public final class HelenusSession extends AbstractSessionOperations implements C
}
public <T> InsertOperation<T> upsert(T pojo) {
Objects.requireNonNull(
pojo,
Objects.requireNonNull(pojo,
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null;
try {
@@ -610,11 +651,11 @@ public final class HelenusSession extends AbstractSessionOperations implements C
private void dropEntity(HelenusEntity entity) {
switch (entity.getType()) {
case TABLE:
case TABLE :
execute(SchemaUtil.dropTable(entity), true);
break;
case UDT:
case UDT :
execute(SchemaUtil.dropUserType(entity), true);
break;
}
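To make the facet cache keys used by checkCache()/updateCache()/mergeCache() above concrete, here is a toy rendering of the key construction; the table name and facet values are hypothetical, and the real combination expansion lives in CacheUtil.flattenFacets.

import java.util.Arrays;
import java.util.List;

class CacheKeySketch {
public static void main(String[] argv) {
String tableName = "widget"; // hypothetical table
// Each entry stands for one bound facet combination.
List<String[]> facetCombinations = Arrays.asList(
new String[]{"name==gizmo"},
new String[]{"id==42", "name==gizmo"});
for (String[] combination : facetCombinations) {
// Same construction as the session code above.
String cacheKey = tableName + "." + Arrays.toString(combination);
System.out.println(cacheKey); // widget.[name==gizmo] then widget.[id==42, name==gizmo]
}
}
}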


@@ -16,7 +16,9 @@
package net.helenus.core;
import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
@@ -35,8 +37,7 @@ public enum HelenusValidator implements PropertyValueValidator {
try {
valid = typeless.isValid(value, null);
} catch (ClassCastException e) {
throw new HelenusMappingException(
"validator was used for wrong type '" + value + "' in " + prop, e);
throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
}
if (!valid) {


@@ -15,8 +15,10 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Row;
import java.util.function.Function;
import com.datastax.driver.core.Row;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
@@ -24,7 +26,8 @@ import net.helenus.support.Fun;
public final class Mappers {
private Mappers() {}
private Mappers() {
}
public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {
@@ -56,8 +59,7 @@ public final class Mappers {
@Override
public Fun.Tuple2<A, B> apply(Row row) {
return new Fun.Tuple2<A, B>(
provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
}
}
@@ -68,10 +70,7 @@ public final class Mappers {
private final HelenusProperty p2;
private final HelenusProperty p3;
public Mapper3(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
HelenusPropertyNode p3) {
this.provider = provider;
this.p1 = p1.getProperty();
@@ -81,9 +80,7 @@
@Override
public Fun.Tuple3<A, B, C> apply(Row row) {
return new Fun.Tuple3<A, B, C>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3));
}
}
@@ -96,12 +93,8 @@ public final class Mappers {
private final HelenusProperty p3;
private final HelenusProperty p4;
public Mapper4(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4) {
public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
HelenusPropertyNode p3, HelenusPropertyNode p4) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
@@ -111,27 +104,18 @@
@Override
public Fun.Tuple4<A, B, C, D> apply(Row row) {
return new Fun.Tuple4<A, B, C, D>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4));
return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
}
}
public static final class Mapper5<A, B, C, D, E>
implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
public static final class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5;
public Mapper5(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5) {
public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
@@ -142,29 +126,19 @@
@Override
public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
return new Fun.Tuple5<A, B, C, D, E>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5));
return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
}
}
public static final class Mapper6<A, B, C, D, E, F>
implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
public static final class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5, p6;
public Mapper6(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5,
HelenusPropertyNode p6) {
public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
@@ -176,30 +150,20 @@
@Override
public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
return new Fun.Tuple6<A, B, C, D, E, F>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5),
return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
provider.getColumnValue(row, 5, p6));
}
}
public static final class Mapper7<A, B, C, D, E, F, G>
implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
public static final class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;
public Mapper7(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5,
HelenusPropertyNode p6,
public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
HelenusPropertyNode p7) {
this.provider = provider;
this.p1 = p1.getProperty();
@@ -213,14 +177,10 @@
@Override
public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
return new Fun.Tuple7<A, B, C, D, E, F, G>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5),
provider.getColumnValue(row, 5, p6),
provider.getColumnValue(row, 6, p7));
return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
}
}
}
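These mappers back the tuple-returning select() overloads in HelenusSession: column i of each Row is read with the property resolved from getter i, in order. A usage sketch (the dsl instance is hypothetical):

// Sketch; `widget` is a hypothetical dsl instance and `session` a HelenusSession.
SelectOperation<Fun.Tuple2<String, Integer>> op = session.select(widget::name, widget::serial);
// Every returned row is converted by Mappers.Mapper2: column 0 -> name, column 1 -> serial.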


@@ -31,8 +31,6 @@ public enum Operator {
LTE("<=");
private final String name;
private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();
static {
@@ -41,15 +39,17 @@
}
}
private final String name;
private Operator(String name) {
this.name = name;
}
public String getName() {
return name;
}
public static Operator findByOperator(String name) {
return indexByName.get(name);
}
public String getName() {
return name;
}
}
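A small usage note for the reverse index built in the static block; the "==" symbol for EQ is an assumption inferred from the LTE("<=") pattern above.

Operator op = Operator.findByOperator("==");  // EQ, looked up via indexByName
String symbol = Operator.LTE.getName();       // "<="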


@@ -1,8 +1,10 @@
package net.helenus.core;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.Objects;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;
@@ -32,10 +34,10 @@ public final class Ordered {
}
switch (direction) {
case ASC:
case ASC :
return QueryBuilder.asc(propNode.getColumnName());
case DESC:
case DESC :
return QueryBuilder.desc(propNode.getColumnName());
}


@@ -1,6 +1,6 @@
package net.helenus.core;
import java.util.*;
import java.util.List;
import java.util.Objects;
public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {


@@ -17,6 +17,7 @@ package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;
@@ -38,38 +39,42 @@ public final class Postulate<V> {
public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {
switch (operator) {
case EQ:
return QueryBuilder.eq(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case EQ :
return QueryBuilder.eq(node.getColumnName(),
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case IN:
case IN :
Object[] preparedValues = new Object[values.length];
for (int i = 0; i != values.length; ++i) {
preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
}
return QueryBuilder.in(node.getColumnName(), preparedValues);
case LT:
return QueryBuilder.lt(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case LT :
return QueryBuilder.lt(node.getColumnName(),
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case LTE:
return QueryBuilder.lte(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case LTE :
return QueryBuilder.lte(node.getColumnName(),
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GT:
return QueryBuilder.gt(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GT :
return QueryBuilder.gt(node.getColumnName(),
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GTE:
return QueryBuilder.gte(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GTE :
return QueryBuilder.gte(node.getColumnName(),
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
default:
default :
throw new HelenusMappingException("unknown filter operation " + operator);
}
}
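
A worked example of the switch above: a GTE postulate over a hypothetical column "age" with prepared value 21 resolves to the driver call below.

// What getClause produces for Operator.GTE (DataStax QueryBuilder, imported above):
Clause clause = QueryBuilder.gte("age", 21); // ... WHERE age >= 21
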
public V[] values() {
return values;
}
@Override
public String toString() {

View file

@ -15,17 +15,20 @@
*/
package net.helenus.core;
import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.mapping.OrderingDirection;
/** Sugar methods for the queries */
public final class Query {
private Query() {}
private Query() {
}
public static BindMarker marker() {
return QueryBuilder.bindMarker();

View file

@ -15,15 +15,16 @@
*/
package net.helenus.core;
import java.util.*;
import java.util.stream.Collectors;
import com.datastax.driver.core.*;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;
import java.util.*;
import java.util.stream.Collectors;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
@ -32,10 +33,10 @@ import net.helenus.mapping.type.OptionalColumnMetadata;
import net.helenus.support.CqlUtil;
import net.helenus.support.HelenusMappingException;
public final class SchemaUtil {
private SchemaUtil() {}
private SchemaUtil() {
}
public static RegularStatement use(String keyspace, boolean forceQuote) {
if (forceQuote) {
@ -58,31 +59,23 @@ public final class SchemaUtil {
ColumnType columnType = prop.getColumnType();
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
throw new HelenusMappingException(
"primary key columns are not supported in UserDefinedType for "
+ prop.getPropertyName()
+ " in entity "
+ entity);
throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
+ prop.getPropertyName() + " in entity " + entity);
}
try {
prop.getDataType().addColumn(create, prop.getColumnName());
} catch (IllegalArgumentException e) {
throw new HelenusMappingException(
"invalid column name '"
+ prop.getColumnName()
+ "' in entity '"
+ entity.getName().getName()
+ "'",
e);
throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
+ entity.getName().getName() + "'", e);
}
}
return create;
}
public static List<SchemaStatement> alterUserType(
UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {
public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
boolean dropUnusedColumns) {
if (entity.getType() != HelenusEntityType.UDT) {
throw new HelenusMappingException("expected UDT entity " + entity);
@ -91,13 +84,12 @@ public final class SchemaUtil {
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
/**
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
* exist
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
* when it will exist
*/
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
final Set<String> visitedColumns =
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
for (HelenusProperty prop : entity.getOrderedProperties()) {
@ -114,9 +106,8 @@ public final class SchemaUtil {
}
DataType dataType = userType.getFieldType(columnName);
SchemaStatement stmt =
prop.getDataType()
.alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
optional(columnName, dataType));
if (stmt != null) {
result.add(stmt);
@ -149,8 +140,29 @@ public final class SchemaUtil {
return SchemaBuilder.dropType(type.getTypeName()).ifExists();
}
public static SchemaStatement createMaterializedView(
String keyspace, String viewName, HelenusEntity entity) {
public static String createPrimaryKeyPhrase(Collection<HelenusProperty> properties) {
List<String> p = new ArrayList<String>(properties.size());
List<String> c = new ArrayList<String>(properties.size());
for (HelenusProperty prop : properties) {
String columnName = prop.getColumnName().toCql();
switch (prop.getColumnType()) {
case PARTITION_KEY :
p.add(columnName);
break;
case CLUSTERING_COLUMN :
c.add(columnName);
break;
default :
break;
}
}
return "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
+ ((c.size() > 0) ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0)) : "") + ")";
}
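
A standalone sketch of the phrase built above, assuming hypothetical partition keys [pk1, pk2] and clustering columns [cc1, cc2]; note that CQL lists clustering columns unparenthesized after the partition key.

List<String> p = Arrays.asList("pk1", "pk2");
List<String> c = Arrays.asList("cc1", "cc2");
String phrase = "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
    + ((c.size() > 0) ? ", " + String.join(", ", c) : "") + ")";
// phrase == "((pk1, pk2), cc1, cc2)"
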
public static SchemaStatement createMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.VIEW) {
throw new HelenusMappingException("expected view entity " + entity);
}
@ -160,10 +172,7 @@ public final class SchemaUtil {
}
List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> props.add(p));
Select.Selection selection = QueryBuilder.select();
@ -175,49 +184,39 @@ public final class SchemaUtil {
Class<?> iface = entity.getMappingInterface();
String tableName = Helenus.entity(iface.getInterfaces()[0]).getName().toCql();
Select.Where where = selection.from(tableName).where();
List<String> p = new ArrayList<String>(props.size());
List<String> c = new ArrayList<String>(props.size());
List<String> o = new ArrayList<String>(props.size());
for (HelenusPropertyNode prop : props) {
String columnName = prop.getColumnName();
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
p.add(columnName);
case PARTITION_KEY :
where = where.and(new IsNotNullClause(columnName));
break;
case CLUSTERING_COLUMN:
c.add(columnName);
case CLUSTERING_COLUMN :
where = where.and(new IsNotNullClause(columnName));
ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod()
.getAnnotation(ClusteringColumn.class);
if (clusteringColumn != null && clusteringColumn.ordering() != null) {
o.add(columnName + " " + clusteringColumn.ordering().cql());
}
break;
default:
default :
break;
}
}
String primaryKey =
"PRIMARY KEY ("
+ ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
+ ((c.size() > 0)
? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
: "")
+ ")";
String primaryKey = "PRIMARY KEY " + createPrimaryKeyPhrase(entity.getOrderedProperties());
String clustering = "";
if (o.size() > 0) {
clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
}
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering);
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering).ifNotExists();
}
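
Illustrative only: for a hypothetical view keyed by ((id), at) over a base table users, the statement assembled above renders roughly as:

// CREATE MATERIALIZED VIEW IF NOT EXISTS ks.users_by_at AS
//   SELECT id, at, name FROM users
//   WHERE id IS NOT NULL AND at IS NOT NULL
//   PRIMARY KEY ((id), at)
//   WITH CLUSTERING ORDER BY (at DESC);
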
public static SchemaStatement dropMaterializedView(
String keyspace, String viewName, HelenusEntity entity) {
public static SchemaStatement dropMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
return new DropMaterializedView(keyspace, viewName);
}
@ -250,15 +249,14 @@ public final class SchemaUtil {
if (!clusteringColumns.isEmpty()) {
Options options = create.withOptions();
clusteringColumns.forEach(
p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
clusteringColumns
.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
}
return create;
}
public static List<SchemaStatement> alterTable(
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
if (entity.getType() != HelenusEntityType.TABLE) {
throw new HelenusMappingException("expected table entity " + entity);
@ -268,8 +266,7 @@ public final class SchemaUtil {
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
final Set<String> visitedColumns =
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
for (HelenusProperty prop : entity.getOrderedProperties()) {
@ -286,8 +283,8 @@ public final class SchemaUtil {
}
ColumnMetadata columnMetadata = tmd.getColumn(columnName);
SchemaStatement stmt =
prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
optional(columnMetadata));
if (stmt != null) {
result.add(stmt);
@ -317,42 +314,28 @@ public final class SchemaUtil {
public static SchemaStatement createIndex(HelenusProperty prop) {
if (prop.caseSensitiveIndex()) {
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
.ifNotExists()
.onTable(prop.getEntity().getName().toCql())
.andColumn(prop.getColumnName().toCql());
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()).ifNotExists()
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
} else {
return new CreateSasiIndex(prop.getIndexName().get().toCql())
.ifNotExists()
.onTable(prop.getEntity().getName().toCql())
.andColumn(prop.getColumnName().toCql());
return new CreateSasiIndex(prop.getIndexName().get().toCql()).ifNotExists()
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
}
}
public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
return entity
.getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.map(p -> SchemaUtil.createIndex(p))
.collect(Collectors.toList());
return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
.map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
}
public static List<SchemaStatement> alterIndexes(
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {
public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
boolean dropUnusedIndexes) {
List<SchemaStatement> list = new ArrayList<SchemaStatement>();
final Set<String> visitedColumns =
dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
entity
.getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.forEach(
p -> {
entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {
String columnName = p.getColumnName().getName();
if (dropUnusedIndexes) {
@ -373,11 +356,9 @@ public final class SchemaUtil {
if (dropUnusedIndexes) {
tmd.getColumns()
.stream()
tmd.getColumns().stream()
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
.forEach(
c -> {
.forEach(c -> {
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
});
}
@ -391,9 +372,9 @@ public final class SchemaUtil {
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
switch (o) {
case ASC:
case ASC :
return SchemaBuilder.Direction.ASC;
case DESC:
case DESC :
return SchemaBuilder.Direction.DESC;
}
throw new HelenusMappingException("unknown ordering " + o);
@ -403,10 +384,7 @@ public final class SchemaUtil {
throw new HelenusMappingException(
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
+ prop.getPropertyName()
+ "' type is '"
+ prop.getJavaType()
+ "' in the entity "
+ prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
+ prop.getEntity());
}

View file

@ -15,16 +15,18 @@
*/
package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.io.PrintStream;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import brave.Tracer;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
@ -38,6 +40,7 @@ import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations {
private final Session session;
private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
private CodecRegistry registry;
private String usingKeyspace;
private boolean showCql = false;
@ -48,15 +51,10 @@ public final class SessionInitializer extends AbstractSessionOperations {
private PrintStream printStream = System.out;
private Executor executor = MoreExecutors.directExecutor();
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
private SessionRepositoryBuilder sessionRepository;
private boolean dropUnusedColumns = false;
private boolean dropUnusedIndexes = false;
private KeyspaceMetadata keyspaceMetadata;
private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
private AutoDdl autoDdl = AutoDdl.UPDATE;
SessionInitializer(Session session) {
@ -181,11 +179,8 @@ public final class SessionInitializer extends AbstractSessionOperations {
public SessionInitializer addPackage(String packageName) {
try {
PackageUtil.getClasses(packageName)
.stream()
.filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(
clazz -> {
PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(clazz -> {
initList.add(Either.right(clazz));
});
} catch (IOException | ClassNotFoundException e) {
@ -247,19 +242,8 @@ public final class SessionInitializer extends AbstractSessionOperations {
public synchronized HelenusSession get() {
initialize();
return new HelenusSession(
session,
usingKeyspace,
registry,
showCql,
printStream,
sessionRepository,
executor,
autoDdl == AutoDdl.CREATE_DROP,
consistencyLevel,
idempotent,
unitOfWorkClass,
metricRegistry,
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
zipkinTracer);
}
@ -267,8 +251,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
initList.forEach(
(either) -> {
initList.forEach((either) -> {
Class<?> iface = null;
if (either.isLeft()) {
iface = MappingUtil.getMappingInterface(either.getLeft());
@ -277,7 +260,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
}
DslExportable dsl = (DslExportable) Helenus.dsl(iface);
dsl.setCassandraMetadataForHelenusSesion(session.getCluster().getMetadata());
dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
sessionRepository.add(dsl);
});
@ -285,74 +268,53 @@ public final class SessionInitializer extends AbstractSessionOperations {
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
switch (autoDdl) {
case CREATE_DROP:
case CREATE_DROP :
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still referenced
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
// referenced by a view.
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.dropView(e));
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as the type is
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
// the type is still referenced by a table.
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.dropTable(e));
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is intentional!)
case CREATE:
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
// intentional!)
case CREATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.createTable(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.createView(e));
break;
case VALIDATE:
case VALIDATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
break;
case UPDATE:
case UPDATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.dropView(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.createView(e));
break;
}
@ -364,41 +326,27 @@ public final class SessionInitializer extends AbstractSessionOperations {
}
}
private void eachUserTypeInOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.UDT)
.forEach(
e -> {
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
}
private void eachUserTypeInReverseOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
deque
.stream()
.forEach(
e -> {
deque.stream().forEach(e -> {
action.accept(e);
});
}
private void eachUserTypeInRecursion(
HelenusEntity e,
Set<HelenusEntity> processedSet,
Set<HelenusEntity> stack,
UserTypeOperations userTypeOps,
Consumer<? super HelenusEntity> action) {
private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
stack.add(e);
@ -419,8 +367,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
private KeyspaceMetadata getKeyspaceMetadata() {
if (keyspaceMetadata == null) {
keyspaceMetadata =
session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
}
return keyspaceMetadata;
}

View file

@ -15,9 +15,11 @@
*/
package net.helenus.core;
import java.util.Collection;
import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import net.helenus.mapping.HelenusEntity;
public final class SessionRepository {
@ -30,8 +32,7 @@ public final class SessionRepository {
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
entityMap =
ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
}
public UserType findUserType(String name) {

View file

@ -15,15 +15,17 @@
*/
package net.helenus.core;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
@ -33,8 +35,7 @@ import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder {
private static final Optional<HelenusEntityType> OPTIONAL_UDT =
Optional.of(HelenusEntityType.UDT);
private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
@ -98,8 +99,7 @@ public final class SessionRepositoryBuilder {
entity = helenusEntity;
if (type.isPresent() && entity.getType() != type.get()) {
throw new HelenusMappingException(
"unexpected entity type " + entity.getType() + " for " + entity);
throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
}
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);

View file

@ -15,9 +15,11 @@
*/
package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
@ -27,8 +29,7 @@ public final class TableOperations {
private final boolean dropUnusedColumns;
private final boolean dropUnusedIndexes;
public TableOperations(
AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns;
this.dropUnusedIndexes = dropUnusedIndexes;
@ -47,10 +48,7 @@ public final class TableOperations {
if (tmd == null) {
throw new HelenusException(
"table does not exists "
+ entity.getName()
+ "for entity "
+ entity.getMappingInterface());
"table does not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
}
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
@ -59,10 +57,7 @@ public final class TableOperations {
if (!list.isEmpty()) {
throw new HelenusException(
"schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
}
}
@ -78,17 +73,14 @@ public final class TableOperations {
public void createView(HelenusEntity entity) {
sessionOps.execute(
SchemaUtil.createMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
true);
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10 does not yet support 2i on materialized views.
SchemaUtil.createMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
// does not yet support 2i on materialized views.
}
public void dropView(HelenusEntity entity) {
sessionOps.execute(
SchemaUtil.dropMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
true);
SchemaUtil.dropMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
}
public void updateView(TableMetadata tmd, HelenusEntity entity) {
@ -103,8 +95,7 @@ public final class TableOperations {
private void executeBatch(List<SchemaStatement> list) {
list.forEach(
s -> {
list.forEach(s -> {
sessionOps.execute(s, true);
});
}

View file

@ -15,33 +15,39 @@
*/
package net.helenus.core;
import java.util.List;
import java.util.Optional;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Stopwatch;
public interface UnitOfWork<E extends Exception> extends AutoCloseable {
import net.helenus.core.cache.Facet;
public interface UnitOfWork<X extends Exception> extends AutoCloseable {
/**
* Marks the beginning of a transactional section of work. Will write a record to the shared
* write-ahead log.
* Marks the beginning of a transactional section of work. Will write a record
* to the shared write-ahead log.
*
* @return the handle used to commit or abort the work.
*/
UnitOfWork begin();
UnitOfWork<X> begin();
UnitOfWork addNestedUnitOfWork(UnitOfWork uow);
void addNestedUnitOfWork(UnitOfWork<X> uow);
/**
* Checks to see if the work performed between calling begin and now can be committed or not.
* Checks to see if the work performed between calling begin and now can be
* committed or not.
*
* @return a function from which to chain work that only happens when commit is successful
* @throws E when the work overlaps with other concurrent writers.
* @return a function from which to chain work that only happens when commit is
* successful
* @throws X
* when the work overlaps with other concurrent writers.
*/
PostCommitFunction<Void, Void> commit() throws E;
PostCommitFunction<Void, Void> commit() throws X;
/**
* Explicitly abort the work within this unit of work. Any nested aborted unit of work will
* trigger the entire unit of work to commit.
* Explicitly abort the work within this unit of work. Any nested aborted unit
* of work will trigger the entire unit of work to abort as well.
*/
void abort();
@ -49,8 +55,14 @@ public interface UnitOfWork<E extends Exception> extends AutoCloseable {
boolean hasCommitted();
//Either<Object, Set<Object>> cacheLookup(String key);
Set<Object> cacheLookup(String key);
Optional<Object> cacheLookup(List<Facet> facets);
void cacheUpdate(Object pojo, List<Facet> facets);
UnitOfWork setPurpose(String purpose);
Stopwatch getExecutionTimer();
Stopwatch getCacheLookupTimer();
Map<String, Set<Object>> getCache();
}
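
A hedged usage sketch of this interface; the session.begin() factory and the select(...).sync(uow) enlistment are assumed from the surrounding codebase, not shown in this diff.

UnitOfWork<Exception> uow = session.begin();
uow.setPurpose("demo");
try {
  // reads/writes enlisted here consult and populate the UOW cache, e.g.:
  // Optional<Widget> w = session.<Widget>select(widget).single().sync(uow);
  uow.commit(); // returns a PostCommitFunction for chaining follow-on work
} catch (Exception e) {
  uow.abort();
}
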

View file

@ -15,9 +15,11 @@
*/
package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
@ -52,10 +54,7 @@ public final class UserTypeOperations {
if (!list.isEmpty()) {
throw new HelenusException(
"schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
}
}
@ -71,8 +70,7 @@ public final class UserTypeOperations {
private void executeBatch(List<SchemaStatement> list) {
list.forEach(
s -> {
list.forEach(s -> {
sessionOps.execute(s, true);
});
}

View file

@ -1,3 +1,18 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.annotation;
import java.lang.annotation.ElementType;
@ -7,4 +22,5 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Cacheable {}
public @interface Cacheable {
}

View file

@ -4,6 +4,7 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.helenus.core.ConflictingUnitOfWorkException;
@Retention(RetentionPolicy.RUNTIME)

View file

@ -0,0 +1,83 @@
package net.helenus.core.aspect;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect
public class RetryAspect {
private static final Logger log = LoggerFactory.getLogger(RetryAspect.class);
@Around("@annotation(net.helenus.core.annotations.Retry)")
public Object retry(ProceedingJoinPoint pjp) throws Throwable {
Retry retryAnnotation = getRetryAnnotation(pjp);
return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
}
private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
return pjp.proceed();
}
private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
int times = retryAnnotation.times();
Class<? extends Throwable>[] retryOn = retryAnnotation.on();
Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn);
}
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
throws Throwable {
try {
return proceed(pjp);
} catch (Throwable throwable) {
if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn);
}
throw throwable;
}
}
private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
Throwable[] causes = ExceptionUtils.getThrowables(throwable);
for (Throwable cause : causes) {
for (Class<? extends Throwable> retryThrowable : retryOn) {
if (retryThrowable.isAssignableFrom(cause.getClass())) {
return true;
}
}
}
return false;
}
private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
MethodSignature signature = (MethodSignature) pjp.getSignature();
Method method = signature.getMethod();
Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
if (retryAnnotation != null) {
return retryAnnotation;
}
Class[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass();
}
method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
return AnnotationUtils.findAnnotation(method, Retry.class);
}
}
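
A sketch of a method this aspect would wrap; the attribute names times and on follow the Assert messages above, and ConflictingUnitOfWorkException is the conflict type imported by the annotation hunk above.

import net.helenus.core.ConflictingUnitOfWorkException;
import net.helenus.core.annotation.Retry;

public class AccountService {
  @Retry(times = 3, on = ConflictingUnitOfWorkException.class)
  public void transfer() {
    // may throw ConflictingUnitOfWorkException; the aspect retries up to
    // three times before rethrowing
  }
}
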

View file

@ -2,7 +2,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method;
import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
@ -13,6 +13,8 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect
public class RetryConcurrentUnitOfWorkAspect {
@ -37,8 +39,8 @@ public class RetryConcurrentUnitOfWorkAspect {
return tryProceeding(pjp, times, retryOn);
}
private Object tryProceeding(
ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
throws Throwable {
try {
return proceed(pjp);
} catch (Throwable throwable) {

View file

@ -0,0 +1,38 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
import java.util.Map;
import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty;
public class BoundFacet extends Facet<String> {
private final Map<HelenusProperty, Object> properties;
BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name,
(properties.keySet().size() > 1)
? "[" + String.join(", ",
properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))
+ "]"
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet())));
this.properties = properties;
}
}

View file

@ -0,0 +1,49 @@
package net.helenus.core.cache;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class CacheUtil {
public static List<String[]> combinations(List<String> items) {
int n = items.size();
if (n > 20 || n < 0)
throw new IllegalArgumentException(n + " is out of range");
long e = Math.round(Math.pow(2, n));
List<String[]> out = new ArrayList<String[]>((int) e - 1);
for (int k = 1; k <= items.size(); k++) {
kCombinations(items, 0, k, new String[k], out);
}
return out;
}
private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
if (k == 0) {
out.add(arr.clone());
} else {
for (int i = n; i <= items.size() - k; i++) {
arr[arr.length - k] = items.get(i);
kCombinations(items, i + 1, k - 1, arr, out);
}
}
}
public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = CacheUtil.combinations(
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> {
return facet.name() + "==" + facet.value();
}).collect(Collectors.toList()));
return combinations;
}
public static Object merge(Object to, Object from) {
return to; // TODO(gburd): yeah...
}
public static String schemaName(List<Facet> facets) {
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
.collect(Collectors.joining("."));
}
}
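
A worked example of the helpers above: combinations enumerates every non-empty subset (2^n - 1 of them) in ascending size order, which flattenFacets then applies to "name==value" strings.

import java.util.Arrays;
import java.util.List;

List<String[]> subsets = CacheUtil.combinations(Arrays.asList("a", "b", "c"));
// in order: [a], [b], [c], [a, b], [a, c], [b, c], [a, b, c] -- 7 == 2^3 - 1
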

View file

@ -0,0 +1,53 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
/**
* An Entity is identifiable via one or more Facets
*/
public class Facet<T> {
private final String name;
private T value;
private boolean fixed = false;
public Facet(String name) {
this.name = name;
}
public Facet(String name, T value) {
this.name = name;
this.value = value;
}
public String name() {
return name;
}
public T value() {
return value;
}
public Facet setFixed() {
fixed = true;
return this;
}
public boolean fixed() {
return fixed;
}
}
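
A small sketch of the intended split: a fixed facet pins schema identity (see CacheUtil.schemaName above), while value facets become "name==value" lookup keys (see flattenFacets).

Facet<String> table = new Facet<>("table", "widgets");
table.setFixed();                              // contributes to schemaName(...)
Facet<String> byId = new Facet<>("id", "42");  // flattened to "id==42"
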

View file

@ -0,0 +1,74 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.helenus.core.SchemaUtil;
import net.helenus.mapping.HelenusProperty;
public class UnboundFacet extends Facet<String> {
private final List<HelenusProperty> properties;
public UnboundFacet(List<HelenusProperty> properties) {
super(SchemaUtil.createPrimaryKeyPhrase(properties));
this.properties = properties;
}
public UnboundFacet(HelenusProperty property) {
super(property.getPropertyName());
properties = new ArrayList<HelenusProperty>();
properties.add(property);
}
public List<HelenusProperty> getProperties() {
return properties;
}
public Binder binder() {
return new Binder(name(), properties);
}
public static class Binder {
private final String name;
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
Binder(String name, List<HelenusProperty> properties) {
this.name = name;
this.properties.addAll(properties);
}
public Binder setValueForProperty(HelenusProperty prop, Object value) {
properties.remove(prop);
boundProperties.put(prop, value);
return this;
}
public boolean isBound() {
return properties.isEmpty();
}
public BoundFacet bind() {
return new BoundFacet(name, boundProperties);
}
}
}
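
A hedged sketch of the binding flow; the unboundFacet instance and the HelenusProperty idProp are assumed to come from entity metadata.

UnboundFacet.Binder binder = unboundFacet.binder();
binder.setValueForProperty(idProp, "42");
if (binder.isBound()) {           // true once every property has a value
  BoundFacet key = binder.bind(); // usable as a cache-identity facet
}
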

View file

@ -17,10 +17,12 @@ package net.helenus.core.operation;
import java.util.LinkedList;
import java.util.List;
import net.helenus.core.*;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends AbstractOperation<E, O> {
extends
AbstractOperation<E, O> {
protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;

View file

@ -19,12 +19,13 @@ import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOptionalOperation<
E, O extends AbstractFilterOptionalOperation<E, O>>
extends AbstractOptionalOperation<E, O> {
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
extends
AbstractOptionalOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;

View file

@ -19,12 +19,13 @@ import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterStreamOperation<
E, O extends AbstractFilterStreamOperation<E, O>>
extends AbstractStreamOperation<E, O> {
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
extends
AbstractStreamOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;

View file

@ -15,14 +15,19 @@
*/
package net.helenus.core.operation;
import java.util.concurrent.CompletableFuture;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import java.util.concurrent.CompletableFuture;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
extends AbstractStatementOperation<E, O> {
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {
public AbstractOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public abstract E transform(ResultSet resultSet);
@ -30,30 +35,29 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
return false;
}
public AbstractOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public PreparedOperation<E> prepare() {
return new PreparedOperation<E>(prepareStatement(), this);
}
public E sync() {
public E sync() {// throws TimeoutException {
final Timer.Context context = requestLatency.time();
try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, showValues, false);
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
showValues, false);
return transform(resultSet);
} finally {
context.stop();
}
}
public E sync(UnitOfWork uow) {
if (uow == null) return sync();
public E sync(UnitOfWork uow) {// throws TimeoutException {
if (uow == null)
return sync();
final Timer.Context context = requestLatency.time();
try {
ResultSet resultSet = execute(sessionOps, uow, traceContext, showValues, true);
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
showValues, true);
E result = transform(resultSet);
return result;
} finally {
@ -62,11 +66,24 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
}
public CompletableFuture<E> async() {
return CompletableFuture.<E>supplyAsync(() -> sync());
return CompletableFuture.<E>supplyAsync(() -> {
// try {
return sync();
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
public CompletableFuture<E> async(UnitOfWork uow) {
if (uow == null) return async();
return CompletableFuture.<E>supplyAsync(() -> sync(uow));
if (uow == null)
return async();
return CompletableFuture.<E>supplyAsync(() -> {
// try {
return sync(uow);
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
}

View file

@ -15,21 +15,26 @@
*/
package net.helenus.core.operation;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends AbstractStatementOperation<E, O> {
extends
AbstractStatementOperation<E, O> {
public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
@ -43,8 +48,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
final O _this = (O) this;
return Futures.transform(
prepareStatementAsync(),
return Futures.transform(prepareStatementAsync(),
new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
@Override
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
@ -53,49 +57,89 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
});
}
public Optional<E> sync() {
public Optional<E> sync() {// throws TimeoutException {
final Timer.Context context = requestLatency.time();
try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, showValues, false);
return transform(resultSet);
Optional<E> result = Optional.empty();
E cacheResult = null;
boolean updateCache = isSessionCacheable();
if (enableCache && isSessionCacheable()) {
List<Facet> facets = bindFacetValues();
String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) {
result = Optional.of(cacheResult);
updateCache = false;
}
}
if (!result.isPresent()) {
// Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
queryTimeoutUnits, showValues, false);
// Transform the query result set into the desired shape.
result = transform(resultSet);
}
if (updateCache && result.isPresent()) {
List<Facet> facets = getFacets();
if (facets != null && facets.size() > 1) {
sessionOps.updateCache(result.get(), facets);
}
}
return result;
} finally {
context.stop();
}
}
public Optional<E> sync(UnitOfWork uow) {
if (uow == null) return sync();
public Optional<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
if (uow == null)
return sync();
final Timer.Context context = requestLatency.time();
try {
Optional<E> result = null;
String key = getStatementCacheKey();
if (enableCache && key != null) {
Set<E> cachedResult = (Set<E>) uow.cacheLookup(key);
if (cachedResult != null) {
//TODO(gburd): what about select ResultSet, Tuple... etc.?
uowCacheHits.mark();
logger.info("UOW({}) cache hit, {}", uow.hashCode(), key);
result = cachedResult.stream().findFirst();
Optional<E> result = Optional.empty();
E cacheResult = null;
boolean updateCache = true;
if (enableCache) {
Stopwatch timer = uow.getCacheLookupTimer();
timer.start();
List<Facet> facets = bindFacetValues();
cacheResult = checkCache(uow, facets);
if (cacheResult != null) {
result = Optional.of(cacheResult);
updateCache = false;
} else {
uowCacheMiss.mark();
if (isSessionCacheable()) {
String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) {
result = Optional.of(cacheResult);
}
}
}
timer.stop();
}
if (result == null) {
ResultSet resultSet = execute(sessionOps, uow, traceContext, showValues, true);
if (!result.isPresent()) {
// Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
showValues, true);
// Transform the query result set into the desired shape.
result = transform(resultSet);
}
if (key != null) {
if (result.isPresent()) {
Set<Object> set = new HashSet<Object>(1);
set.add(result.get());
uow.getCache().put(key, set);
} else {
uow.getCache().put(key, new HashSet<Object>(0));
}
}
// If we have a result, it wasn't from the UOW cache, and we're caching things,
// then we need to put this result into the cache for future requests to find.
if (updateCache && result.isPresent()) {
updateCache(uow, result.get(), getFacets());
}
return result;
@ -105,11 +149,24 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
}
public CompletableFuture<Optional<E>> async() {
return CompletableFuture.<Optional<E>>supplyAsync(() -> sync());
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
// try {
return sync();
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
public CompletableFuture<Optional<E>> async(UnitOfWork uow) {
if (uow == null) return async();
return CompletableFuture.<Optional<E>>supplyAsync(() -> sync(uow));
public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
if (uow == null)
return async();
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
// try {
return sync(uow);
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
}
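
The lookup order sync(uow) implements above, reduced to a sketch; uowLookup, sessionLookup, queryCassandra and updateCaches are hypothetical stand-ins, not Helenus APIs.

Optional<E> result = uowLookup(facets);            // 1) unit-of-work cache
boolean fromUow = result.isPresent();
if (!result.isPresent() && isSessionCacheable()) {
  result = sessionLookup(facets);                  // 2) session cache
}
if (!result.isPresent()) {
  result = transform(queryCassandra());            // 3) Cassandra
}
if (!fromUow) {
  result.ifPresent(v -> updateCaches(v, facets));  // back-fill the UOW cache
}
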

View file

@ -15,8 +15,15 @@
*/
package net.helenus.core.operation;
import brave.Tracer;
import brave.propagation.TraceContext;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
@ -27,21 +34,26 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
extends Operation<E> {
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {
final Logger logger = LoggerFactory.getLogger(getClass());
public abstract Statement buildStatement(boolean cached);
private static final Logger LOG = LoggerFactory.getLogger(AbstractStatementOperation.class);
protected boolean enableCache = true;
protected boolean showValues = true;
protected TraceContext traceContext;
long queryExecutionTimeout = 10;
TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
private ConsistencyLevel consistencyLevel;
private ConsistencyLevel serialConsistencyLevel;
private RetryPolicy retryPolicy;
@ -56,6 +68,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
this.idempotent = sessionOperations.getDefaultQueryIdempotency();
}
public abstract Statement buildStatement(boolean cached);
public O ignoreCache(boolean enabled) {
enableCache = enabled;
return (O) this;
@ -203,6 +217,18 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
return (O) this;
}
public O queryTimeoutMs(long ms) {
this.queryExecutionTimeout = ms;
this.queryTimeoutUnits = TimeUnit.MILLISECONDS;
return (O) this;
}
public O queryTimeout(long timeout, TimeUnit units) {
this.queryExecutionTimeout = timeout;
this.queryTimeoutUnits = units;
return (O) this;
}
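
Usage sketch for the new timeout knobs; op stands for any operation built on this class.

op.queryTimeout(2, TimeUnit.SECONDS); // explicit unit
op.queryTimeoutMs(500);               // millisecond convenience, same effect
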
public Statement options(Statement statement) {
if (defaultTimestamp != null) {
@ -255,7 +281,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
public String cql() {
Statement statement = buildStatement(false);
if (statement == null) return "";
if (statement == null)
return "";
if (statement instanceof BuiltStatement) {
BuiltStatement buildStatement = (BuiltStatement) statement;
return buildStatement.setForceNoValues(true).getQueryString();
@ -291,4 +318,51 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
throw new HelenusException("only RegularStatements can be prepared");
}
protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
E result = null;
Optional<Object> optionalCachedResult = Optional.empty();
if (!facets.isEmpty()) {
optionalCachedResult = uow.cacheLookup(facets);
if (optionalCachedResult.isPresent()) {
uowCacheHits.mark();
LOG.info("UnitOfWork({}) cache hit using facets", uow.hashCode());
result = (E) optionalCachedResult.get();
}
}
if (result == null) {
uowCacheMiss.mark();
LOG.info("UnitOfWork({}) cache miss", uow.hashCode());
}
return result;
}
protected void updateCache(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
List<Facet> facets = new ArrayList<>();
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
for (Facet facet : identifyingFacets) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
if (valueMap == null) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
binder.setValueForProperty(prop, value.toString());
} else {
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
}
facets.add(binder.bind());
});
} else {
facets.add(facet);
}
}
// Cache the value (pojo), the statement key, and the fully bound facets.
uow.cacheUpdate(pojo, facets);
}
}

View file

@ -15,20 +15,27 @@
*/
package net.helenus.core.operation;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
extends AbstractStatementOperation<E, O> {
extends
AbstractStatementOperation<E, O> {
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
@ -42,8 +49,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
final O _this = (O) this;
return Futures.transform(
prepareStatementAsync(),
return Futures.transform(prepareStatementAsync(),
new Function<PreparedStatement, PreparedStreamOperation<E>>() {
@Override
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
@ -52,56 +58,115 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
});
}
public Stream<E> sync() {
public Stream<E> sync() {// throws TimeoutException {
final Timer.Context context = requestLatency.time();
try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, showValues, false);
return transform(resultSet);
Stream<E> resultStream = null;
E cacheResult = null;
boolean updateCache = isSessionCacheable();
if (enableCache && isSessionCacheable()) {
List<Facet> facets = bindFacetValues();
String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) {
resultStream = Stream.of(cacheResult);
updateCache = false;
}
}
if (resultStream == null) {
// Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
queryTimeoutUnits, showValues, false);
// Transform the query result set into the desired shape.
resultStream = transform(resultSet);
}
if (updateCache && resultStream != null) {
List<Facet> facets = getFacets();
if (facets != null && facets.size() > 1) {
List<E> again = new ArrayList<>();
resultStream.forEach(result -> {
sessionOps.updateCache(result, facets);
again.add(result);
});
resultStream = again.stream();
}
}
return resultStream;
} finally {
context.stop();
}
}
public Stream<E> sync(UnitOfWork uow) {
if (uow == null) return sync();
public Stream<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
if (uow == null)
return sync();
final Timer.Context context = requestLatency.time();
try {
Stream<E> result = null;
String key = getStatementCacheKey();
if (enableCache && key != null) {
Set<E> cachedResult = (Set<E>) uow.cacheLookup(key);
Stream<E> resultStream = null;
E cachedResult = null;
boolean updateCache = true;
if (enableCache) {
Stopwatch timer = uow.getCacheLookupTimer();
timer.start();
List<Facet> facets = bindFacetValues();
cachedResult = checkCache(uow, facets);
if (cachedResult != null) {
//TODO(gburd): what about select ResultSet, Tuple... etc.?
uowCacheHits.mark();
logger.info("UOW({}) cache hit, {}", uow.hashCode());
result = cachedResult.stream();
} else {
uowCacheMiss.mark();
resultStream = Stream.of(cachedResult);
updateCache = false;
}
timer.stop();
}
if (result == null) {
ResultSet resultSet = execute(sessionOps, uow, traceContext, showValues, true);
result = transform(resultSet);
if (key != null) {
uow.getCache().put(key, (Set<Object>) result);
}
if (resultStream == null) {
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
showValues, true);
resultStream = transform(resultSet);
}
return result;
// If we have a result and we're caching then we need to put it into the cache
// for future requests to find.
if (updateCache && resultStream != null) {
List<E> again = new ArrayList<>();
List<Facet> facets = getFacets();
resultStream.forEach(result -> {
updateCache(uow, result, facets);
again.add(result);
});
resultStream = again.stream();
}
return resultStream;
} finally {
context.stop();
}
}
public CompletableFuture<Stream<E>> async() {
return CompletableFuture.<Stream<E>>supplyAsync(() -> sync());
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
// try {
return sync();
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
if (uow == null) return async();
return CompletableFuture.<Stream<E>>supplyAsync(() -> sync(uow));
public CompletableFuture<Stream<E>> async(UnitOfWork<?> uow) {
if (uow == null)
return async();
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
// try {
return sync(uow);
// } catch (TimeoutException ex) {
// throw new CompletionException(ex);
// }
});
}
}
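Taken together, the class now has four entry points: sync and async, each with or without a unit of work. A minimal caller-side sketch follows; the Widget entity and the session-side select(...), begin(), and commit() calls are assumed for illustration and are not part of this change.
// Hypothetical usage; assumes select(Widget.class) yields an operation of this type.
Stream<Widget> rows = session.select(Widget.class).sync();      // may be served from the session cache
UnitOfWork uow = session.begin();
Stream<Widget> scoped = session.select(Widget.class).sync(uow); // results pinned in the UOW cache
CompletableFuture<Stream<Widget>> future = session.select(Widget.class).async(uow);
uow.commit();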
View file
@ -39,4 +39,9 @@ public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation
public Statement buildStatement(boolean cached) {
return boundStatement;
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
}
View file
@ -15,19 +15,18 @@
*/
package net.helenus.core.operation;
import java.util.Optional;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;
import java.util.Optional;
public final class BoundOptionalOperation<E>
extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
private final BoundStatement boundStatement;
private final AbstractOptionalOperation<E, ?> delegate;
public BoundOptionalOperation(
BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
super(operation.sessionOps);
this.boundStatement = boundStatement;
this.delegate = operation;
@ -42,4 +41,9 @@ public final class BoundOptionalOperation<E>
public Statement buildStatement(boolean cached) {
return boundStatement;
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
}
View file
@ -15,27 +15,29 @@
*/
package net.helenus.core.operation;
import java.util.List;
import java.util.stream.Stream;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;
import java.util.stream.Stream;
public final class BoundStreamOperation<E>
extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
import net.helenus.core.cache.Facet;
public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
private final BoundStatement boundStatement;
private final AbstractStreamOperation<E, ?> delegate;
public BoundStreamOperation(
BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
super(operation.sessionOps);
this.boundStatement = boundStatement;
this.delegate = operation;
}
@Override
public String getStatementCacheKey() {
return delegate.getStatementCacheKey();
public List<Facet> bindFacetValues() {
return delegate.bindFacetValues();
}
@Override
@ -47,4 +49,9 @@ public final class BoundStreamOperation<E>
public Statement buildStatement(boolean cached) {
return boundStatement;
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
}
View file
@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
@ -73,11 +74,8 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
if (entity == null) {
entity = p.getEntity();
} else if (entity != p.getEntity()) {
throw new HelenusMappingException(
"you can count columns only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
throw new HelenusMappingException("you can count columns only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}
}
View file
@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
@ -117,11 +118,8 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
if (entity == null) {
entity = p.getEntity();
} else if (entity != p.getEntity()) {
throw new HelenusMappingException(
"you can delete rows only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
throw new HelenusMappingException("you can delete rows only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}
}
View file
@ -15,18 +15,20 @@
*/
package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.google.common.base.Joiner;
import java.util.*;
import java.util.function.Function;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
@ -38,12 +40,10 @@ import net.helenus.support.HelenusMappingException;
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
private HelenusEntity entity;
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final T pojo;
private final Class<?> resultType;
private HelenusEntity entity;
private boolean ifNotExists;
private int[] ttl;
@ -57,8 +57,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.resultType = ResultSet.class;
}
public InsertOperation(
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
super(sessionOperations);
this.ifNotExists = ifNotExists;
@ -66,12 +65,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.resultType = resultType;
}
public InsertOperation(
AbstractSessionOperations sessionOperations,
HelenusEntity entity,
T pojo,
Set<String> mutations,
boolean ifNotExists) {
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
Set<String> mutations, boolean ifNotExists) {
super(sessionOperations);
this.entity = entity;
@ -83,9 +78,18 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
Set<String> keys = (mutations == null) ? null : mutations;
for (HelenusProperty prop : properties) {
boolean addProp = false;
if (keys == null || keys.contains(prop.getPropertyName())) {
switch (prop.getColumnType()) {
case PARTITION_KEY :
case CLUSTERING_COLUMN :
addProp = true;
break;
default :
addProp = (keys == null || keys.contains(prop.getPropertyName()));
}
if (addProp) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);
@ -128,7 +132,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
values.forEach(t -> addPropertyNode(t._1));
if (values.isEmpty()) return null;
if (values.isEmpty())
return null;
if (entity == null) {
throw new HelenusMappingException("unknown entity");
@ -140,8 +145,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
insert.ifNotExists();
}
values.forEach(
t -> {
values.forEach(t -> {
insert.value(t._1.getColumnName(), t._2);
});
@ -160,6 +164,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
if (values.size() > 0) {
boolean immutable = Drafted.class.isAssignableFrom(iface);
Collection<HelenusProperty> properties = entity.getOrderedProperties();
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
@ -172,15 +177,17 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (backingMap.containsKey(key)) {
// Some values may need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter =
prop.getReadConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop
.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key)));
}
} else {
// If we started this operation with an instance of this type, use values from that.
// If we started this operation with an instance of this type, use values from
// that.
if (pojo != null) {
backingMap.put(key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
backingMap.put(key,
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
} else {
// Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType();
@ -199,7 +206,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return (T) Helenus.map(iface, backingMap);
}
// Oddly, this insert didn't change any value so simply return the pojo.
// TODO(gburd): this pojo is the result of a Draft.build() call which will not preserve object identity (o1 == o2), ... fix me.
// TODO(gburd): this pojo is the result of a Draft.build() call which will not
// preserve object identity (o1 == o2), ... fix me.
return (T) pojo;
}
return (T) resultSet;
@ -221,46 +229,20 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (entity == null) {
entity = p.getEntity();
} else if (entity != p.getEntity()) {
throw new HelenusMappingException(
"you can insert only single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
+ " or " + p.getEntity().getMappingInterface());
}
}
@Override
public String getStatementCacheKey() {
List<String> keys = new ArrayList<>(values.size());
values.forEach(
t -> {
HelenusPropertyNode prop = t._1;
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
keys.add(prop.getColumnName() + "==" + t._2.toString());
break;
default:
break;
}
});
return entity.getName() + ": " + Joiner.on(",").join(keys);
}
@Override
public T sync(UnitOfWork uow) {
public T sync(UnitOfWork uow) {// throws TimeoutException {
if (uow == null) {
return sync();
}
T result = super.sync(uow);
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
String key = getStatementCacheKey();
if (key != null) {
Set<Object> set = new HashSet<Object>(1);
set.add(result);
uow.getCache().put(key, set);
}
updateCache(uow, result, entity.getFacets());
}
return result;
}
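With the string cache key gone, a successful insert now registers its result under the entity's facets so later reads in the same unit of work can find it. A hypothetical sketch; insert(...), begin(), and commit() are assumed session API:
// Hypothetical: the inserted pojo becomes visible to facet-keyed lookups in this UOW.
UnitOfWork uow = session.begin();
Widget w = session.insert(widget).sync(uow); // super.sync(uow), then updateCache(uow, w, entity.getFacets())
uow.commit();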
View file
@ -1,17 +1,37 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.operation;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import java.util.concurrent.ExecutionException;
import com.google.common.base.Stopwatch;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
public abstract class Operation<E> {
@ -28,14 +48,11 @@ public abstract class Operation<E> {
this.requestLatency = metrics.timer("net.helenus.request-latency");
}
public ResultSet execute(
AbstractSessionOperations session,
UnitOfWork uow,
TraceContext traceContext,
boolean showValues,
boolean cached) {
public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
TimeUnit units, boolean showValues, boolean cached) { // throws TimeoutException {
// Start recording the time this operation takes in a Zipkin sub-span.
// Start recording the time this operation takes in a Zipkin
// sub-span.
Tracer tracer = session.getZipkinTracer();
Span span = null;
if (tracer != null && traceContext != null) {
@ -50,12 +67,18 @@ public abstract class Operation<E> {
}
Statement statement = options(buildStatement(cached));
Stopwatch timer = null;
if (uow != null) {
timer = uow.getExecutionTimer();
timer.start();
}
ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
return futureResultSet.get();
ResultSet resultSet = futureResultSet.getUninterruptibly(); // TODO(gburd): (timeout, units);
} catch (InterruptedException | ExecutionException e) {
if (uow != null)
timer.stop();
throw new RuntimeException(e);
return resultSet;
} finally {
@ -73,7 +96,16 @@ public abstract class Operation<E> {
return null;
}
public String getStatementCacheKey() {
public List<Facet> getFacets() {
return null;
}
public List<Facet> bindFacetValues() {
return null;
}
public boolean isSessionCacheable() {
return false;
}
}
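These hooks default to no caching; cache-aware operations override them, and the stream operation earlier in this change composes them roughly as below. This is a condensed restatement of that path, not new behavior:
// Condensed session-cache read-through, using only calls that appear in this change.
if (isSessionCacheable()) {
    List<Facet> facets = bindFacetValues();           // null unless a subclass overrides it
    String tableName = CacheUtil.schemaName(facets);  // table-name component of the cache key
    Object hit = sessionOps.checkCache(tableName, facets);
    // on a miss: execute(...), transform(...), then sessionOps.updateCache(result, getFacets())
}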
View file
@ -43,4 +43,5 @@ public final class PreparedOperation<E> {
public String toString() {
return preparedStatement.getQueryString();
}
}
View file
@ -23,8 +23,7 @@ public final class PreparedOptionalOperation<E> {
private final PreparedStatement preparedStatement;
private final AbstractOptionalOperation<E, ?> operation;
public PreparedOptionalOperation(
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
this.preparedStatement = statement;
this.operation = operation;
}
View file
@ -23,8 +23,7 @@ public final class PreparedStreamOperation<E> {
private final PreparedStatement preparedStatement;
private final AbstractStreamOperation<E, ?> operation;
public PreparedStreamOperation(
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
this.preparedStatement = statement;
this.operation = operation;
}
View file
@ -15,13 +15,16 @@
*/
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
public final class SelectFirstOperation<E>
extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet;
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
private final SelectOperation<E> delegate;
@ -38,17 +41,27 @@ public final class SelectFirstOperation<E>
}
@Override
public String getStatementCacheKey() {
return delegate.getStatementCacheKey();
public BuiltStatement buildStatement(boolean cached) {
return delegate.buildStatement(cached);
}
@Override
public BuiltStatement buildStatement(boolean cached) {
return delegate.buildStatement(cached);
public List<Facet> getFacets() {
return delegate.getFacets();
}
@Override
public List<Facet> bindFacetValues() {
return delegate.bindFacetValues();
}
@Override
public Optional<E> transform(ResultSet resultSet) {
return delegate.transform(resultSet).findFirst();
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
}
View file
@ -15,13 +15,18 @@
*/
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet;
public final class SelectFirstTransformingOperation<R, E>
extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
extends
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
private final SelectOperation<E> delegate;
private final Function<E, R> fn;
@ -36,8 +41,8 @@ public final class SelectFirstTransformingOperation<R, E>
}
@Override
public String getStatementCacheKey() {
return delegate.getStatementCacheKey();
public List<Facet> bindFacetValues() {
return delegate.bindFacetValues();
}
@Override
@ -49,4 +54,9 @@ public final class SelectFirstTransformingOperation<R, E>
public Optional<R> transform(ResultSet resultSet) {
return delegate.transform(resultSet).findFirst().map(fn);
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
}
View file
@ -15,6 +15,14 @@
*/
package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement;
@ -23,13 +31,11 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.helenus.core.*;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.MappingUtil;
@ -41,20 +47,21 @@ import net.helenus.support.HelenusMappingException;
public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
protected Function<Row, E> rowMapper = null;
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
private static final Logger LOG = LoggerFactory.getLogger(SelectOperation.class);
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
protected Function<Row, E> rowMapper = null;
protected List<Ordering> ordering = null;
protected Integer limit = null;
protected boolean allowFiltering = false;
protected String alternateTableName = null;
protected boolean isCacheable = false;
@SuppressWarnings("unchecked")
public SelectOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
this.rowMapper =
new Function<Row, E>() {
this.rowMapper = new Function<Row, E>() {
@Override
public E apply(Row source) {
@ -77,31 +84,25 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
super(sessionOperations);
entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
isCacheable = entity.isCacheable();
}
public SelectOperation(
AbstractSessionOperations sessionOperations,
HelenusEntity entity,
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
Function<Row, E> rowMapper) {
super(sessionOperations);
this.rowMapper = rowMapper;
entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
isCacheable = entity.isCacheable();
}
public SelectOperation(
AbstractSessionOperations sessionOperations,
Function<Row, E> rowMapper,
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
HelenusPropertyNode... props) {
super(sessionOperations);
@ -118,11 +119,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
if (entity == null) {
entity = prop.getEntity();
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException(
"you can count records only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
throw new HelenusMappingException("you can count records only from a single entity "
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
}
}
@ -133,12 +131,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
Objects.requireNonNull(materializedViewClass);
HelenusEntity entity = Helenus.entity(materializedViewClass);
this.alternateTableName = entity.getName().toCql();
this.allowFiltering = true;
return this;
}
public SelectOperation<E> from(String alternateTableName) {
this.alternateTableName = alternateTableName;
this.props.clear();
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
return this;
}
@ -155,9 +150,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
this.rowMapper = null;
return new SelectTransformingOperation<R, E>(
this,
(r) -> {
return new SelectTransformingOperation<R, E>(this, (r) -> {
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
return (R) Helenus.map(entityClass, map);
});
@ -194,29 +187,43 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
}
@Override
public String getStatementCacheKey() {
List<String> keys = new ArrayList<>(filters.size());
HelenusEntity entity = props.get(0).getEntity();
public boolean isSessionCacheable() {
return isCacheable;
}
for (HelenusPropertyNode prop : props) {
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
Filter filter = filters.get(prop.getProperty());
@Override
public List<Facet> getFacets() {
HelenusEntity entity = props.get(0).getEntity();
return entity.getFacets();
}
@Override
public List<Facet> bindFacetValues() {
HelenusEntity entity = props.get(0).getEntity();
List<Facet> boundFacets = new ArrayList<>();
for (Facet facet : entity.getFacets()) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
Filter filter = filters.get(prop);
if (filter != null) {
keys.add(filter.toString());
Object[] postulates = filter.postulateValues();
for (Object p : postulates) {
binder.setValueForProperty(prop, p.toString());
}
}
});
if (binder.isBound()) {
boundFacets.add(binder.bind());
}
} else {
return null;
}
break;
default:
if (keys.size() > 0) {
return entity.getName() + ": " + Joiner.on(",").join(keys);
}
return null;
boundFacets.add(facet);
}
}
return null;
return boundFacets;
}
@Override
@ -236,19 +243,16 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
if (entity == null) {
entity = prop.getEntity();
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException(
"you can select columns only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
throw new HelenusMappingException("you can select columns only from a single entity "
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
}
if (cached) {
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
case PARTITION_KEY :
case CLUSTERING_COLUMN :
break;
default:
default :
if (entity.equals(prop.getEntity())) {
if (prop.getNext().isPresent()) {
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
@ -288,8 +292,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
}
if (ifFilters != null && !ifFilters.isEmpty()) {
logger.error(
"onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
LOG.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
}
if (allowFiltering) {
@ -303,14 +306,12 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
@Override
public Stream<E> transform(ResultSet resultSet) {
if (rowMapper != null) {
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
return StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
.map(rowMapper);
} else {
return (Stream<E>)
StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
false);
return (Stream<E>) StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
}
}
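To make bindFacetValues() above concrete, here is the binder protocol it drives, reduced to one property; idProp stands in for a real HelenusProperty handle:
// Sketch of the UnboundFacet binding protocol; every call appears in this change.
UnboundFacet unbound = new UnboundFacet(idProp);  // idProp: assumed key-column property
UnboundFacet.Binder binder = unbound.binder();
binder.setValueForProperty(idProp, "42");         // value taken from a filter's postulateValues()
if (binder.isBound()) {
    Facet bound = binder.bind();                  // fully bound; participates in cache keys
}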
View file
@ -15,13 +15,18 @@
*/
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet;
public final class SelectTransformingOperation<R, E>
extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
extends
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
private final SelectOperation<E> delegate;
private final Function<E, R> fn;
@ -36,8 +41,13 @@ public final class SelectTransformingOperation<R, E>
}
@Override
public String getStatementCacheKey() {
return delegate.getStatementCacheKey();
public List<Facet> bindFacetValues() {
return delegate.bindFacetValues();
}
@Override
public List<Facet> getFacets() {
return delegate.getFacets();
}
@Override
View file
@ -15,13 +15,15 @@
*/
package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update;
import java.util.*;
import java.util.function.Function;
import net.helenus.core.*;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
@ -32,12 +34,10 @@ import net.helenus.support.Immutables;
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
private HelenusEntity entity = null;
private final List<Assignment> assignments = new ArrayList<Assignment>();
private final AbstractEntityDraft<E> draft;
private final Map<String, Object> draftMap;
private HelenusEntity entity = null;
private int[] ttl;
private long[] timestamp;
@ -47,15 +47,13 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
this.draftMap = null;
}
public UpdateOperation(
AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
super(sessionOperations);
this.draft = draft;
this.draftMap = draft.toMap();
}
public UpdateOperation(
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
super(sessionOperations);
this.draft = null;
this.draftMap = null;
@ -297,8 +295,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
Object valueObj = value;
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
valueObj = convertedList.get(0);
@ -313,8 +310,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
List valueObj = value;
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
valueObj = (List) converter.get().apply(value);
}
@ -419,8 +415,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusProperty prop = p.getProperty();
Object valueObj = value;
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
valueObj = convertedSet.iterator().next();
@ -434,8 +429,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusProperty prop = p.getProperty();
Set valueObj = value;
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
valueObj = (Set) converter.get().apply(value);
}
@ -459,11 +453,10 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map<Object, Object> convertedMap =
(Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
.apply(Immutables.mapOf(key, value));
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
}
@ -488,8 +481,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map convertedMap = (Map) converter.get().apply(map);
assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
@ -569,27 +561,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
if (entity == null) {
entity = p.getEntity();
} else if (entity != p.getEntity()) {
throw new HelenusMappingException(
"you can update columns only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
throw new HelenusMappingException("you can update columns only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}
@Override
public E sync(UnitOfWork uow) {
public E sync(UnitOfWork uow) {// throws TimeoutException {
if (uow == null) {
return sync();
}
E result = super.sync(uow);
// TODO(gburd): Only drafted entity objects are updated in the cache at this
// time.
if (draft != null) {
String key = getStatementCacheKey();
if (key != null) {
Set<Object> set = new HashSet<Object>(1);
set.add(result);
uow.getCache().put(key, set);
}
updateCache(uow, result, getFacets());
}
return result;
}
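Per the TODO above, only draft-based updates flow back into the unit-of-work cache for now. A hypothetical sketch of the path that does reach it; update(draft) as an entry point is assumed:
// Hypothetical: draft != null, so sync(uow) ends with updateCache(uow, result, getFacets()).
UnitOfWork uow = session.begin();
Widget updated = session.update(draft).sync(uow);
uow.commit();
// A plain column-level update (draft == null) currently skips the write-back.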
View file
@ -19,20 +19,10 @@ import java.util.HashMap;
import java.util.Map;
public enum DefaultPrimitiveTypes {
BOOLEAN(boolean.class, false),
BYTE(byte.class, (byte) 0x0),
CHAR(char.class, (char) 0x0),
SHORT(short.class, (short) 0),
INT(int.class, 0),
LONG(long.class, 0L),
FLOAT(float.class, 0.0f),
DOUBLE(double.class, 0.0);
BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class,
(short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);
private final Class<?> primitiveClass;
private final Object defaultValue;
private static final Map<Class<?>, DefaultPrimitiveTypes> map =
new HashMap<Class<?>, DefaultPrimitiveTypes>();
private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();
static {
for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
@ -40,6 +30,9 @@ public enum DefaultPrimitiveTypes {
}
}
private final Class<?> primitiveClass;
private final Object defaultValue;
private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
this.primitiveClass = primitiveClass;
this.defaultValue = defaultValue;
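This enum backs the primitive-default fallback used by MapperInvocationHandler further down. A minimal sketch of the lookup path; getDefaultValue() is assumed to expose the defaultValue field:
// lookup(...) consults the static map built above.
DefaultPrimitiveTypes t = DefaultPrimitiveTypes.lookup(int.class);
int fallback = (Integer) t.getDefaultValue(); // 0, returned instead of null for primitive getters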
View file
@ -1,6 +1,20 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.reflect;
import java.util.Set;
public interface Drafted<T> extends MapExportable {
View file
@ -16,17 +16,18 @@
package net.helenus.core.reflect;
import com.datastax.driver.core.Metadata;
import net.helenus.mapping.HelenusEntity;
public interface DslExportable {
public static final String GET_ENTITY_METHOD = "getHelenusMappingEntity";
public static final String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
public static final String SET_METADATA_METHOD = "setCassandraMetadataForHelenusSesion";
String GET_ENTITY_METHOD = "getHelenusMappingEntity";
String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
String SET_METADATA_METHOD = "setCassandraMetadataForHelenusSession";
HelenusEntity getHelenusMappingEntity();
HelenusPropertyNode getParentDslHelenusPropertyNode();
void setCassandraMetadataForHelenusSesion(Metadata metadata);
void setCassandraMetadataForHelenusSession(Metadata metadata);
}
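These constants exist so the proxy invocation handlers can intercept the calls by name; DslInvocationHandler below is the receiving side. The sending side might look like this, assuming the session holds a DSL proxy and a driver Cluster:
// Assumed sending side: push driver Metadata into a DSL proxy once the session is up.
if (dsl instanceof DslExportable) {
    ((DslExportable) dsl).setCassandraMetadataForHelenusSession(cluster.getMetadata());
}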
View file
@ -15,13 +15,15 @@
*/
package net.helenus.core.reflect;
import com.datastax.driver.core.*;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.datastax.driver.core.*;
import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;
@ -34,23 +36,16 @@ import net.helenus.support.HelenusException;
public class DslInvocationHandler<E> implements InvocationHandler {
private final Class<E> iface;
private final ClassLoader classLoader;
private final Optional<HelenusPropertyNode> parent;
private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
private HelenusEntity entity = null;
private Metadata metadata = null;
private final Class<E> iface;
private final ClassLoader classLoader;
private final Optional<HelenusPropertyNode> parent;
private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
public DslInvocationHandler(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
Metadata metadata) {
this.metadata = metadata;
@ -59,7 +54,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
this.classLoader = classLoader;
}
public void setCassandraMetadataForHelenusSesion(Metadata metadata) {
public void setCassandraMetadataForHelenusSession(Metadata metadata) {
if (metadata != null) {
this.metadata = metadata;
entity = init(metadata);
@ -78,11 +73,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
Object childDsl =
Helenus.dsl(
javaType,
classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)),
Object childDsl = Helenus.dsl(javaType, classLoader, Optional.of(new HelenusPropertyNode(prop, parent)),
metadata);
udtMap.put(prop.getGetterMethod(), childDsl);
@ -91,15 +82,10 @@ public class DslInvocationHandler<E> implements InvocationHandler {
if (type instanceof DTDataType) {
DTDataType dataType = (DTDataType) type;
if (dataType.getDataType() instanceof TupleType
&& !TupleValue.class.isAssignableFrom(javaType)) {
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
Object childDsl =
Helenus.dsl(
javaType,
classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)),
metadata);
Object childDsl = Helenus.dsl(javaType, classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
tupleMap.put(prop.getGetterMethod(), childDsl);
}
@ -126,11 +112,9 @@ public class DslInvocationHandler<E> implements InvocationHandler {
return false;
}
if (DslExportable.SET_METADATA_METHOD.equals(methodName)
&& args.length == 1
&& args[0] instanceof Metadata) {
if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
if (metadata == null) {
this.setCassandraMetadataForHelenusSesion((Metadata) args[0]);
this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
}
return null;
}
@ -182,7 +166,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
DataType dt = dataType.getDataType();
switch (dt.getName()) {
case TUPLE:
case TUPLE :
Object childDsl = tupleMap.get(method);
if (childDsl != null) {
@ -191,16 +175,16 @@ public class DslInvocationHandler<E> implements InvocationHandler {
break;
case SET:
case SET :
return new SetDsl(new HelenusPropertyNode(prop, parent));
case LIST:
case LIST :
return new ListDsl(new HelenusPropertyNode(prop, parent));
case MAP:
case MAP :
return new MapDsl(new HelenusPropertyNode(prop, parent));
default:
default :
break;
}
}
View file
@ -19,7 +19,9 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;
import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;
View file
@ -17,6 +17,7 @@ package net.helenus.core.reflect;
import java.util.*;
import java.util.stream.Collectors;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
View file
@ -16,6 +16,7 @@
package net.helenus.core.reflect;
import java.util.*;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;
View file
@ -19,6 +19,7 @@ import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;
View file
@ -23,6 +23,7 @@ import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.Map;
import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient;
import net.helenus.support.HelenusException;
@ -39,23 +40,22 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
}
private Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable {
// NOTE: This is reflection magic to invoke (non-recursively) a default method implemented on an interface
// that we've proxied (in ReflectionDslInstantiator). I found the answer in this article.
// NOTE: This is reflection magic to invoke (non-recursively) a default method
// implemented on an interface
// that we've proxied (in ReflectionDslInstantiator). I found the answer in this
// article.
// https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/
// First, we need an instance of a private inner-class found in MethodHandles.
Constructor<MethodHandles.Lookup> constructor =
MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
int.class);
constructor.setAccessible(true);
// Now we need to look up and invoke (with invokespecial semantics) the default method on the interface class.
// Now we need to look up and invoke (with invokespecial semantics) the default method on the
// interface class.
final Class<?> declaringClass = method.getDeclaringClass();
Object result =
constructor
.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
.unreflectSpecial(method, declaringClass)
.bindTo(proxy)
.invokeWithArguments(args);
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
.unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
return result;
}
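A caveat on the Lookup constructor hack above: it depends on reflective access that newer JDKs lock down. From Java 9 onward the supported route is MethodHandles.privateLookupIn; shown here for comparison only, it is not part of this change:
// Java 9+ alternative to the private-constructor trick in invokeDefault().
MethodHandles.Lookup lookup = MethodHandles.privateLookupIn(declaringClass, MethodHandles.lookup());
Object result = lookup.unreflectSpecial(method, declaringClass)
        .bindTo(proxy)
        .invokeWithArguments(args);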
@ -75,7 +75,12 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return false;
}
if (Proxy.isProxyClass(otherObj.getClass())) {
return this == Proxy.getInvocationHandler(otherObj);
if (this == Proxy.getInvocationHandler(otherObj)) {
return true;
}
}
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
return true;
}
return false;
}
@ -106,13 +111,15 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
if (value == null) {
// Default implementations of non-Transient methods in entities supply the value when the
// Default implementations of non-Transient methods in entities supply the
// value when the
// map contains 'null'.
if (method.isDefault()) {
return invokeDefault(proxy, method, args);
}
// Otherwise, if the return type of the method is a primitive Java type then we'll return the standard
// Otherwise, if the return type of the method is a primitive Java type then
// we'll return the standard
// default values to avoid an NPE in user code.
if (returnType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
View file
@ -15,9 +15,11 @@
*/
package net.helenus.core.reflect;
import com.datastax.driver.core.Metadata;
import java.lang.reflect.Proxy;
import java.util.Optional;
import com.datastax.driver.core.Metadata;
import net.helenus.core.DslInstantiator;
public enum ReflectionDslInstantiator implements DslInstantiator {
@ -25,15 +27,10 @@ public enum ReflectionDslInstantiator implements DslInstantiator {
@Override
@SuppressWarnings("unchecked")
public <E> E instantiate(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
Metadata metadata) {
DslInvocationHandler<E> handler =
new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
E proxy =
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
return proxy;
}
}
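For orientation, this instantiator is what Helenus.dsl(...) delegates to; a hypothetical end-to-end use, with metadata coming from the driver:
// Hypothetical: obtain a DSL proxy for an entity interface. The proxy also implements
// DslExportable, so Cassandra Metadata can be pushed into it later.
Object dsl = Helenus.dsl(Widget.class, Widget.class.getClassLoader(), Optional.empty(), metadata);
((DslExportable) dsl).setCassandraMetadataForHelenusSession(metadata);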
View file
@ -19,7 +19,8 @@ import net.helenus.support.HelenusMappingException;
public final class ReflectionInstantiator {
private ReflectionInstantiator() {}
private ReflectionInstantiator() {
}
public static <T> T instantiateClass(Class<T> clazz) {
View file
@ -17,6 +17,7 @@ package net.helenus.core.reflect;
import java.lang.reflect.Proxy;
import java.util.Map;
import net.helenus.core.MapperInstantiator;
public enum ReflectionMapperInstantiator implements MapperInstantiator {
@ -27,8 +28,7 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
E proxy =
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, MapExportable.class}, handler);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
return proxy;
}
}
View file
@ -18,6 +18,7 @@ package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import net.helenus.support.HelenusMappingException;
public final class SetDsl<V> implements Set<V> {
View file
@ -16,6 +16,7 @@
package net.helenus.mapping;
import java.lang.reflect.Method;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
@ -102,21 +103,13 @@ public final class ColumnInformation {
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
if (columnTypeLocal != ColumnType.COLUMN) {
throw new HelenusMappingException(
"property can be annotated only by a single column type " + getter);
throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
}
}
@Override
public String toString() {
return "ColumnInformation [columnName="
+ columnName
+ ", columnType="
+ columnType
+ ", ordinal="
+ ordinal
+ ", ordering="
+ ordering
+ "]";
return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
+ ", ordering=" + ordering + "]";
}
}
View file
@ -16,8 +16,5 @@
package net.helenus.mapping;
public enum ColumnType {
PARTITION_KEY,
CLUSTERING_COLUMN,
STATIC_COLUMN,
COLUMN;
PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;
}
View file
@ -16,6 +16,9 @@
package net.helenus.mapping;
import java.util.Collection;
import java.util.List;
import net.helenus.core.cache.Facet;
public interface HelenusEntity {
@ -30,4 +33,6 @@ public interface HelenusEntity {
Collection<HelenusProperty> getOrderedProperties();
HelenusProperty getProperty(String name);
List<Facet> getFacets();
}
View file
@ -16,8 +16,5 @@
package net.helenus.mapping;
public enum HelenusEntityType {
TABLE,
VIEW,
TUPLE,
UDT;
TABLE, VIEW, TUPLE, UDT;
}
View file
@ -15,17 +15,23 @@
*/
package net.helenus.mapping;
import com.datastax.driver.core.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Method;
import java.util.*;
import org.apache.commons.lang3.ClassUtils;
import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*;
import net.helenus.support.HelenusMappingException;
import org.apache.commons.lang3.ClassUtils;
public final class HelenusMappingEntity implements HelenusEntity {
@ -36,6 +42,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
private final ImmutableMap<String, Method> methods;
private final ImmutableMap<String, HelenusProperty> props;
private final ImmutableList<HelenusProperty> orderedProps;
private final List<Facet> facets;
public HelenusMappingEntity(Class<?> iface, Metadata metadata) {
this(iface, autoDetectType(iface), metadata);
@ -59,8 +66,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
}
for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
if (c.getDeclaredAnnotation(Table.class) != null
|| c.getDeclaredAnnotation(InheritedTable.class) != null) {
if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) {
for (Method m : c.getDeclaredMethods()) {
Method o = methods.get(m.getName());
if (o != null) {
@ -102,7 +108,70 @@ public final class HelenusMappingEntity implements HelenusEntity {
validateOrdinals();
// Caching
cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));
List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
for (HelenusProperty prop : orderedProps) {
switch (prop.getColumnType()) {
case PARTITION_KEY :
case CLUSTERING_COLUMN :
primaryKeyProperties.add(prop);
break;
default :
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
primaryKeyProperties = null;
}
Optional<IdentityName> optionalIndexName = prop.getIndexName();
if (optionalIndexName.isPresent()) {
UnboundFacet facet = new UnboundFacet(prop);
facetsBuilder.add(facet);
}
}
}
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
}
this.facets = facetsBuilder.build();
}
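// For a concrete sense of the loop above: a hypothetical Widget with partition key 'id',
// clustering column 'seq', and an indexed column 'name' would yield, in order:
//   facets.get(0) -> Facet("table", "widget"), marked fixed
//   facets.get(1) -> UnboundFacet([id, seq]), the whole primary key, added once
//   facets.get(2) -> UnboundFacet([name]), one unbound facet per indexed column
// e.g. List<Facet> facets = Helenus.entity(Widget.class).getFacets();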
private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
switch (type) {
case TABLE :
return MappingUtil.getTableName(iface, true);
case VIEW :
return MappingUtil.getViewName(iface, true);
case TUPLE :
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
case UDT :
return MappingUtil.getUserDefinedTypeName(iface, true);
}
throw new HelenusMappingException("invalid entity type " + type + " in " + type);
}
private static HelenusEntityType autoDetectType(Class<?> iface) {
Objects.requireNonNull(iface, "empty iface");
if (null != iface.getDeclaredAnnotation(Table.class)) {
return HelenusEntityType.TABLE;
} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
return HelenusEntityType.VIEW;
} else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE;
} else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT;
}
throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
}
@Override
@ -130,65 +199,33 @@ public final class HelenusMappingEntity implements HelenusEntity {
HelenusProperty property = props.get(name);
if (property == null && methods.containsKey(name)) {
property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
return property; //TODO(gburd): review adding these into the props map...
return property; // TODO(gburd): review adding these into the props map...
}
return props.get(name);
}
@Override
public List<Facet> getFacets() {
return facets;
}
@Override
public IdentityName getName() {
return name;
}
private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
switch (type) {
case TABLE:
return MappingUtil.getTableName(iface, true);
case VIEW:
return MappingUtil.getViewName(iface, true);
case TUPLE:
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
case UDT:
return MappingUtil.getUserDefinedTypeName(iface, true);
}
throw new HelenusMappingException("invalid entity type " + type + " in " + type);
}
private static HelenusEntityType autoDetectType(Class<?> iface) {
Objects.requireNonNull(iface, "empty iface");
if (null != iface.getDeclaredAnnotation(Table.class)) {
return HelenusEntityType.TABLE;
} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
return HelenusEntityType.VIEW;
} else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE;
} else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT;
}
throw new HelenusMappingException(
"entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
}
private void validateOrdinals() {
switch (getType()) {
case TABLE:
case TABLE :
validateOrdinalsForTable();
break;
case TUPLE:
case TUPLE :
validateOrdinalsInTuple();
break;
default:
default :
break;
}
}
@ -205,29 +242,24 @@ public final class HelenusMappingEntity implements HelenusEntity {
int ordinal = prop.getOrdinal();
switch (type) {
case PARTITION_KEY:
case PARTITION_KEY :
if (partitionKeys.get(ordinal)) {
throw new HelenusMappingException(
"detected two or more partition key columns with the same ordinal "
+ ordinal
+ " in "
"detected two or more partition key columns with the same ordinal " + ordinal + " in "
+ prop.getEntity());
}
partitionKeys.set(ordinal);
break;
case CLUSTERING_COLUMN:
case CLUSTERING_COLUMN :
if (clusteringColumns.get(ordinal)) {
throw new HelenusMappingException(
"detected two or clustering columns with the same ordinal "
+ ordinal
+ " in "
+ prop.getEntity());
throw new HelenusMappingException("detected two or clustering columns with the same ordinal "
+ ordinal + " in " + prop.getEntity());
}
clusteringColumns.set(ordinal);
break;
default:
default :
break;
}
}
@ -236,27 +268,17 @@ public final class HelenusMappingEntity implements HelenusEntity {
private void validateOrdinalsInTuple() {
boolean[] ordinals = new boolean[props.size()];
getOrderedProperties().forEach(p -> {
int ordinal = p.getOrdinal();
if (ordinal < 0 || ordinal >= ordinals.length) {
throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
+ p.getPropertyName() + " in " + p.getEntity());
}
if (ordinals[ordinal]) {
throw new HelenusMappingException(
"detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
}
ordinals[ordinal] = true;
@ -273,12 +295,8 @@ public final class HelenusMappingEntity implements HelenusEntity {
public String toString() {
StringBuilder str = new StringBuilder();
str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
.append(type.name().toLowerCase()).append(":\n");
for (HelenusProperty prop : getOrderedProperties()) {
str.append(prop.toString());

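For reference, entity-type auto-detection (autoDetectType above) keys off the annotation declared directly on the mapping interface. A minimal sketch with illustrative interface names, not taken from this commit:

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.UDT;

// autoDetectType(Account.class) would return HelenusEntityType.TABLE
// because @Table is declared directly on the interface.
@Table
interface Account {
  @PartitionKey
  String accountId();
}

// autoDetectType(Address.class) would return HelenusEntityType.UDT.
@UDT
interface Address {
  String street();
  String city();
}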

@ -15,13 +15,16 @@
*/
package net.helenus.mapping;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Optional;
import java.util.function.Function;
import javax.validation.ConstraintValidator;
import com.datastax.driver.core.Metadata;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes;
@ -42,12 +45,10 @@ public final class HelenusMappingProperty implements HelenusProperty {
private final Class<?> javaType;
private final AbstractJavaType abstractJavaType;
private final AbstractDataType dataType;
private volatile Optional<Function<Object, Object>> readConverter = null;
private volatile Optional<Function<Object, Object>> writeConverter = null;
private final ConstraintValidator<? extends Annotation, ?>[] validators;
public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
this.entity = entity;
this.getter = getter;
@ -62,9 +63,8 @@ public final class HelenusMappingProperty implements HelenusProperty {
this.javaType = getter.getReturnType();
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
this.columnInfo.getColumnType(), metadata);
this.validators = MappingUtil.getValidators(getter);
}
@ -168,13 +168,13 @@ public final class HelenusMappingProperty implements HelenusProperty {
ColumnType type = this.getColumnType();
switch (type) {
case PARTITION_KEY :
str.append("partition_key[");
str.append(this.getOrdinal());
str.append("] ");
break;
case CLUSTERING_COLUMN :
str.append("clustering_column[");
str.append(this.getOrdinal());
str.append("] ");
@ -184,11 +184,11 @@ public final class HelenusMappingProperty implements HelenusProperty {
}
break;
case STATIC_COLUMN :
str.append("static ");
break;
case COLUMN :
break;
}


@ -19,7 +19,9 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;
import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType;


@ -20,8 +20,10 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.validation.Constraint;
import javax.validation.ConstraintValidator;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.*;
@ -32,10 +34,10 @@ import net.helenus.support.HelenusMappingException;
public final class MappingUtil {
@SuppressWarnings("unchecked")
public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0];
private MappingUtil() {
}
public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
@ -60,8 +62,8 @@ public final class MappingUtil {
}
}
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation,
List<ConstraintValidator<? extends Annotation, ?>> list) {
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
@ -73,8 +75,8 @@ public final class MappingUtil {
for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {
ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator
.instantiateClass(clazz);
((ConstraintValidator) validator).initialize(constraintAnnotation);
@ -106,9 +108,7 @@ public final class MappingUtil {
}
}
return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty();
}
public static boolean caseSensitiveIndex(Method getterMethod) {


@ -28,10 +28,6 @@ public enum OrderingDirection {
this.cql = cql;
}
public static OrderingDirection parseString(String name) {
if (ASC.cql.equalsIgnoreCase(name)) {
@ -42,4 +38,8 @@ public enum OrderingDirection {
throw new HelenusMappingException("invalid ordering direction name " + name);
}
public String cql() {
return cql;
}
}


@ -22,8 +22,7 @@ public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty
public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {
int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
if (c == 0) {
c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());


@ -19,34 +19,44 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.helenus.mapping.OrderingDirection;
/**
* ClusteringColumn is the family column in the legacy Cassandra API.
*
* <p>
* The purpose of this column is to add an additional dimension to the table.
* Both @PartitionKey and @ClusteringColumn together are parts of the primary
* key of the table. The primary difference between them is that the first one
* is used for routing purposes, to locate a data node in the cluster, while
* the second one is used inside the node to locate a piece of data on a
* concrete machine.
*
* <p>
* A ClusteringColumn can be represented as a key in a SortedMap that is fully
* stored on a single node. Developers must be careful when selecting fields
* for clustering columns, because all data inside this SortedMap must fit on
* one node.
*
* <p>
* A ClusteringColumn can have more than one part, and the order of the parts
* is important. This order defines the way Cassandra joins the parts and
* influences data retrieval operations. Each part can have an ordering
* property that defines the default ascending or descending order of the data.
* With two or more parts, select queries need a consistent order of all parts
* as they are defined in the table.
*
* <p>
* For example, if the first part is ASC ordering and the second is also ASC,
* Cassandra will sort entries like this: a-a a-b b-a b-b. In this case we are
* able to run the queries: ORDER BY first ASC, second ASC; ORDER BY first
* DESC, second DESC; WHERE first=? ORDER BY second ASC; WHERE first=? ORDER BY
* second DESC; WHERE first=? AND second=?
*
* <p>
* But we can not run the queries: ORDER BY first DESC, second ASC; ORDER BY
* first ASC, second DESC; WHERE second=?; ORDER BY first (ASC,DESC)
*/
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -60,35 +70,40 @@ public @interface ClusteringColumn {
String value() default "";
/**
* ClusteringColumn parts must be ordered in the @Table; this is a requirement
* of Cassandra. Cassandra joins all parts into the final clustering key that
* is stored in the column family name. Additionally, all parts can have an
* ordering (ASC, DESC) that, together with the sequence of parts, determines
* the key comparison function, so Cassandra always stores column family names
* in sorted order.
*
* <p>
* By default ordinal has the value 0, because in most cases a @Table has a
* single ClusteringColumn. If you have two or more parts of the
* ClusteringColumn, then you need to use ordinal() to define the sequence of
* the parts.
*
* @return number used to sort clustering columns
*/
int ordinal() default 0;
/**
* Default order of values in the ClusteringColumn. This ordering is used for
* comparison of the clustering column values when Cassandra stores them in
* sorted order.
*
* <p>
* The default value is ascending order.
*
* @return ascending order or descending order of clustering column values
*/
OrderingDirection ordering() default OrderingDirection.ASC;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the column needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

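A short sketch of how ordinal and ordering combine in a time-series table (illustrative names, assuming the annotations above):

import java.util.Date;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface SensorReading {
  @PartitionKey
  String sensorId();

  // First clustering part: rows within a partition sort by day first...
  @ClusteringColumn(ordinal = 0)
  String day();

  // ...then by time of capture, newest first.
  @ClusteringColumn(ordinal = 1, ordering = OrderingDirection.DESC)
  Date recordedAt();
}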

@ -18,15 +18,18 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/**
* Column annotation is used to define additional properties of a column in the
* entity mapping interfaces: @Table, @UDT, @Tuple.
*
* <p>
* The Column annotation can be used to override the default name of the column
* or to set up the order of the columns in the mapping.
*
* <p>
* Usually for @Table and @UDT types it is not important to define the order of
* the columns, but in @Tuple mapping it is required, because the tuple itself
* represents a sequence of types with a particular order in the table's column.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -43,18 +46,21 @@ public @interface Column {
/**
* Ordinal will be used for ascending sorting of columns
*
* <p>
* The default value is 0, because not all mapping entities require all fields
* to have unique ordinals; only the @Tuple mapping entity requires all of them
* to be unique.
*
* @return number used to sort columns, usually for @Tuple only
*/
int ordinal() default 0;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the column needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

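A minimal sketch of overriding a column name (illustrative names):

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface User {
  @PartitionKey
  String id();

  // Maps this property to the "first_name" column instead of the
  // default name derived from the method name.
  @Column("first_name")
  String firstName();
}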

@ -16,83 +16,106 @@
package net.helenus.mapping.annotation;
import java.lang.annotation.*;
import javax.validation.Constraint;
import net.helenus.mapping.validator.*;
/**
* Constraint annotations are used for data integrity, mostly
* for @java.lang.String types. The place of the annotation is the particular
* method in the model interface.
*
* <p>
* None of them have an effect on selects and data retrieval operations.
*
* <p>
* Supported types: - @NotNull supports any @java.lang.Object type - All
* annotations support the @java.lang.String type
*/
public final class Constraints {
private Constraints() {
}
/**
* The NotNull annotation is used to check that a value is not null before
* storing it
*
* <p>
* Applicable to any @java.lang.Object
*
* <p>
* It does not check on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotNullValidator.class)
public @interface NotNull {
}
/**
* The NotEmpty annotation is used to check that a value has text before
* storing it
*
* <p>
* Also checks for null, so it is a stricter annotation than @NotNull
*
* <p>
* Can be used for @java.lang.CharSequence, @ByteBuffer and any array
*
* <p>
* It does not check on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotEmptyValidator.class)
public @interface NotEmpty {
}
/**
* The Email annotation is used to check that a value is a valid email address
* before storing it
*
* <p>
* Can be used only for @CharSequence
*
* <p>
* It does not check on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = EmailValidator.class)
public @interface Email {
}
/**
* The Number annotation is used to check that all characters in a value are
* digits before storing it
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It does not check on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NumberValidator.class)
public @interface Number {
}
/**
* The Alphabet annotation is used to check that all letters in a value are in
* a specific alphabet before storing it
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It does not check on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -109,11 +132,14 @@ public final class Constraints {
}
/**
* The Length annotation is used to ensure that a value has an exact length
* before storing it
*
* <p>
* Can be used for @java.lang.CharSequence, @ByteBuffer and any array
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -125,12 +151,14 @@ public final class Constraints {
}
/**
* The MaxLength annotation is used to ensure that a value has a length less
* than or equal to some threshold before storing it
*
* <p>
* Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -142,12 +170,14 @@ public final class Constraints {
}
/**
* The MinLength annotation is used to ensure that a value has a length greater
* than or equal to some threshold before storing it
*
* <p>
* Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -159,38 +189,48 @@ public final class Constraints {
}
/**
* The LowerCase annotation is used to ensure that a value is in lower case
* before storing it
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = LowerCaseValidator.class)
public @interface LowerCase {
}
/**
* The UpperCase annotation is used to ensure that a value is in upper case
* before storing it
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = UpperCaseValidator.class)
public @interface UpperCase {
}
/**
* The Pattern annotation is used to ensure that a value matches a regular
* expression pattern before storing it
*
* <p>
* Can be used only for @java.lang.CharSequence
*
* <p>
* It has no effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)

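A minimal sketch of constraint annotations on a model interface (illustrative names; only the attribute-free annotations shown above are used):

import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface Member {
  @PartitionKey
  String id();

  // Rejects null and empty values before a write; no effect on reads.
  @Constraints.NotEmpty
  String nickname();

  // Must look like a valid email address before a write.
  @Constraints.Email
  String email();
}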

@ -3,17 +3,20 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/**
* The CoveringIndex annotation is used on a specific column or method in an
* entity interface with the @Table annotation.
*
* <p>
* A corresponding materialized view will be created based on the
* underlying @Table for the specific column.
*
* <p>
* This is useful when you need to perform IN or SORT/ORDER-BY queries, and to
* do so you'll need a different materialized table on disk in Cassandra.
*
* <p>
* For each @Table annotated interface Helenus will create/update/verify
* Cassandra Materialized Views and some indexes if needed on startup.
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@ -21,7 +24,8 @@ import java.lang.annotation.*;
public @interface CoveringIndex {
/**
* Defines the name of the index. By default, the entity name with the column
* name as a suffix.
*
* @return name of the covering index
*/


@ -18,18 +18,22 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/**
* The Index annotation is used on a specific column or method in an entity
* interface with the @Table annotation.
*
* <p>
* The corresponding secondary index will be created in the underlying @Table
* for the specific column.
*
* <p>
* Currently Cassandra supports only single-column indexes, so this index works
* only for a single column.
*
* <p>
* Make sure that you are using low cardinality columns for this index; that is
* a requirement of Cassandra. Low cardinality field examples: gender, country,
* age, status, etc. High cardinality field examples: id, email, timestamp,
* UUID, etc.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -44,17 +48,19 @@ public @interface Index {
String value() default "";
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the index needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/
boolean forceQuote() default false;
/**
* Create a case-insensitive index using Cassandra 3.x+ support for SASI
* indexing.
*
* @return true if the index should ignore case when comparing
*/

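A minimal sketch of a secondary index on a low cardinality column (illustrative names):

import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface Customer {
  @PartitionKey
  String id();

  // Low cardinality column, so a reasonable secondary index candidate;
  // the value overrides the generated index name.
  @Index("customer_country_idx")
  String country();
}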

@ -20,7 +20,9 @@ import java.lang.annotation.*;
/**
* Inherited Entity annotation
*
* <p>
* Inherited Table annotation is used to indicate that the methods should also
* be mapped
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)


@ -20,13 +20,17 @@ import java.lang.annotation.*;
/**
* Materialized alternate view of another Entity annotation
*
* <p>
* The MaterializedView annotation is used to define a different mapping to
* some other Table interface
*
* <p>
* This is useful when you need to perform IN or SORT/ORDER-BY queries, and to
* do so you'll need a different materialized table on disk in Cassandra.
*
* <p>
* For each @Table annotated interface Helenus will create/update/verify
* Cassandra Materialized Views and some indexes if needed on startup.
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@ -41,10 +45,11 @@ public @interface MaterializedView {
String value() default "";
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the type needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

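One plausible shape for a view definition, assuming the view interface mirrors the base table's columns under a different primary key (the exact contract is not shown in this diff):

import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.MaterializedView;
import net.helenus.mapping.annotation.PartitionKey;

// Re-keys a hypothetical "user" table by email so WHERE email = ?
// queries can be served from this view.
@MaterializedView
interface UserByEmail {
  @PartitionKey
  String email();

  @ClusteringColumn
  String id();
}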

@ -21,16 +21,20 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* The PartitionKey annotation is used to define that a particular column is
* part of the partition key in the table.
*
* <p>
* The Partition Key is the routing key. Cassandra uses it to find the primary
* data node in the cluster that holds the data. Cassandra combines all parts
* of the partition key into a byte array and then calculates a hash function
* using a good distribution algorithm (by default Murmur3). After that it uses
* the hash number as a token in the ring to find a virtual and then a physical
* data server.
*
* <p>
* A @Table mapping entity is required to have at minimum one PartitionKey
* column. For @UDT and @Tuple mapping entities the @PartitionKey annotation is
* not used.
*/
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -44,23 +48,26 @@ public @interface PartitionKey {
String value() default "";
/**
* PartitionKey parts must be ordered in the @Table; this is a requirement of
* Cassandra. That is how the partition key calculation works: column parts
* will be joined based on some order and the final hash/token will be
* calculated.
*
* <p>
* By default ordinal has the value 0, because in most cases a @Table has a
* single @PartitionKey column. If you have two or more parts of the
* PartitionKey, then you need to use ordinal() to define the sequence of the
* parts.
*
* @return number used to sort columns in the PartitionKey
*/
int ordinal() default 0;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the column needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

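A minimal sketch of a composite partition key (illustrative names):

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface PageView {
  // Both parts are joined in ordinal order and hashed together
  // into the routing token.
  @PartitionKey(ordinal = 0)
  String siteId();

  @PartitionKey(ordinal = 1)
  String day();
}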

@ -23,12 +23,14 @@ import java.lang.annotation.Target;
/**
* The StaticColumn annotation is used to define a static column in a Cassandra
* Table
*
* <p>
* It has no effect in @UDT and @Tuple types, or in @Table-s that do not
* have @ClusteringColumn-s
*
* <p>
* When using @ClusteringColumn we may repeat some information that is unique
* for a row. For this purpose we can define the @StaticColumn annotation,
* which will create a static column in the table
*/
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -49,10 +51,11 @@ public @interface StaticColumn {
int ordinal() default 0;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the column needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

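A minimal sketch (illustrative names): the static column is stored once per partition rather than once per clustered row.

import java.util.UUID;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.StaticColumn;
import net.helenus.mapping.annotation.Table;

@Table
interface Message {
  @PartitionKey
  UUID conversationId();

  @ClusteringColumn
  UUID messageId();

  // Shared by every message row in the same conversation partition.
  @StaticColumn
  String conversationTitle();
}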

@ -20,12 +20,15 @@ import java.lang.annotation.*;
/**
* Entity annotation
*
* <p>
* The Table annotation is used to define a Table mapping to some interface
*
* <p>
* There are three types of Entity mapping annotations: @Table, @UDT, @Tuple
*
* <p>
* For each @Table annotated interface Helenus will create/update/verify a
* Cassandra Table and some indexes if needed on startup.
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@ -40,10 +43,11 @@ public @interface Table {
String value() default "";
/**
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the table needs to be quoted.
*
* <p>
* The default value is false; we quote only selected names.
*
* @return true if the name has to be quoted
*/

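A minimal sketch of the name and forceQuote attributes (illustrative names; ORDER is a CQL reserved word, hence the quoting):

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table(value = "order", forceQuote = true)
interface Order {
  @PartitionKey
  String orderId();
}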

@ -17,8 +17,12 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/**
* Transient annotation is used to mark properties that need not be mapped
* to the database.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Transient {
}
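A minimal sketch (illustrative names): a derived value computed from mapped columns, never stored itself.

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Transient;

@Table
interface Profile {
  @PartitionKey
  String id();

  String firstName();

  String lastName();

  // Computed on the Java side; no corresponding column is created.
  @Transient
  default String displayName() {
    return firstName() + " " + lastName();
  }
}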


@ -20,15 +20,19 @@ import java.lang.annotation.*;
/**
* Entity annotation
*
* <p>
* The Tuple annotation is used to define a Tuple type mapping to some
* interface
*
* <p>
* There are three types of Entity mapping annotations: @Table, @UDT, @Tuple
*
* <p>
* A Tuple is a fully embedded type; it is a sequence of underlying types where
* the order of the sub-types is important, therefore all @Column-s must have
* ordinal() and only the @Column annotation is supported for underlying types
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface Tuple {
}
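A minimal sketch (illustrative names): every column carries an explicit ordinal because a tuple is an ordered sequence of types.

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Tuple;

@Tuple
interface Coordinates {
  @Column(ordinal = 0)
  double latitude();

  @Column(ordinal = 1)
  double longitude();
}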


@ -15,62 +15,79 @@
*/
package net.helenus.mapping.annotation;
import java.lang.annotation.*;
import com.datastax.driver.core.DataType;
/**
* Types annotations are used to clarify the Cassandra data type for a
* particular Java type.
*
* <p>
* Sometimes a single Java type can map to multiple Cassandra data
* types: - @String can be @DataType.Name.ASCII or @DataType.Name.TEXT
* or @DataType.Name.VARCHAR - @Long can be @DataType.Name.BIGINT
* or @DataType.Name.COUNTER
*
* <p>
* All these type annotations simplify mapping between Java types and Cassandra
* data types. They are not required; for each Java type there is a default
* Cassandra data type in Helenus, but in some cases you may want to control
* the mapping to make sure that the right Cassandra data type is used.
*
* <p>
* For complex types like collections, UDT and Tuple types, these annotations
* are used to clarify the sub-type(s) or class/UDT names.
*
* <p>
* They have a significant effect on schema operations.
*/
public final class Types {
private Types() {
}
/** Says to use the @DataType.Name.ASCII data type in the schema; Java type is @String */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Ascii {
}
/** Says to use the @DataType.Name.BIGINT data type in the schema; Java type is @Long */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Bigint {
}
/**
* Says to use the @DataType.Name.BLOB data type in the schema; Java type
* is @ByteBuffer or @byte[]. Used by default.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Blob {
}
/**
* Says to use the @DataType.Name.LIST data type in the schema with a specific
* sub-type; Java type is @List
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: prepend, prependAll, setIdx,
* append, appendAll, discard and discardAll in @UpdateOperation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -78,10 +95,11 @@ public final class Types {
public @interface List {
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT sub-type in the list, consider the @UDTList annotation
*
* @return data type name of the value
*/
@ -89,15 +107,22 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.MAP data type in the schema with specific
* sub-types; Java type is @Map
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: put and putAll
* in @UpdateOperation.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -105,22 +130,24 @@ public final class Types {
public @interface Map {
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT key sub-type in the map, consider the @UDTKeyMap
* or @UDTMap annotations
*
* @return data type name of the key
*/
DataType.Name key();
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT value sub-type in the map, consider the @UDTValueMap
* or @UDTMap annotations
*
* @return data type name of the value
*/
@ -130,24 +157,33 @@ public final class Types {
/**
* Says to use @DataType.Name.COUNTER type in schema Java type is @Long
*
* <p>
* For this type there are special operations: increment and decrement
* in @UpdateOperation. You do not need to initialize the counter value; it
* will be done automatically by Cassandra.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Counter {
}
/**
* Says to use the @DataType.Name.SET data type in the schema with a specific
* sub-type; Java type is @Set
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: add, addAll, remove and
* removeAll in @UpdateOperation.
*/
@Documented
@ -156,10 +192,11 @@ public final class Types {
public @interface Set {
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT sub-type in the set, consider the @UDTSet annotation
*
* @return data type name of the value
*/
@ -167,10 +204,12 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.CUSTOM type in the schema; Java type
* is @ByteBuffer or @byte[]
*
* <p>
* Used for custom user types that have a special implementation. Helenus does
* not deal with this class directly for now; it is used only in serialized
* form.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -185,39 +224,53 @@ public final class Types {
String className();
}
/**
* Says to use the @DataType.Name.TEXT type in the schema; Java type
* is @String. Used by default.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Text {
}
/**
* Says to use the @DataType.Name.TIMESTAMP type in the schema; Java type
* is @Date. Used by default.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Timestamp {
}
/**
* Says to use the @DataType.Name.TIMEUUID type in the schema; Java type
* is @UUID or @Date
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Timeuuid {
}
/**
* Says to use the @DataType.Name.TUPLE type in the schema; Java type
* is @TupleValue or a model interface with the @Tuple annotation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Tuple {
/**
* If the Java type is @TupleValue then this field is required. Any Cassandra
* Tuple is a sequence of Cassandra types. For now Helenus supports only simple
* data types in tuples for the @TupleValue Java type
*
* <p>
* If the Java type is a model interface with the @Tuple annotation then all
* methods in this interface can have Types annotations that can be complex
* types as well.
*
* @return data type name sequence
*/
@ -225,8 +278,8 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.UDT type in the schema; Java type
* is @UDTValue or a model interface with the @UDT annotation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -234,13 +287,17 @@ public final class Types {
public @interface UDT {
/**
* If the Java type is @UDTValue then this field is required. Any Cassandra UDT
* has a name and must be created before it is used as a Cassandra Type.
*
* <p>
* This value is the UDT name of the Cassandra Type that was already created in
* the schema
*
* <p>
* If the Java type is a model interface with the @UDT annotation then this
* field is not used, since the model interface defines a UserDefinedType with
* a specific name
*
* @return UDT name
*/
@ -249,10 +306,13 @@ public final class Types {
/**
* Only used for JavaType @UDTValue
*
* <p>
* If the value() method returns a reserved word that can not be used as the
* name of the UDT, then forceQuote will add additional quotes around this name
* in all CQL queries.
*
* <p>
* The default value is false.
*
* @return true if quotation is needed
*/
@ -260,16 +320,22 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.MAP data type in the schema with a specific
* UDT sub-type as the key and a simple sub-type as the value; Java type
* is @Map
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: put and putAll
* in @UpdateOperation.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -284,10 +350,12 @@ public final class Types {
UDT key();
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT value sub-type in the map, consider the @UDTMap annotation
*
* @return data type name of the value
*/
@ -295,17 +363,22 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.LIST data type in the schema with a specific
* UDT sub-type; Java type is @List
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: prepend, prependAll, setIdx,
* append, appendAll, discard and discardAll in @UpdateOperation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -321,16 +394,22 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.MAP data type in the schema with specific UDT
* sub-types; Java type is @Map
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: put and putAll
* in @UpdateOperation.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -345,7 +424,8 @@ public final class Types {
UDT key();
/**
* Clarification of using the UDT data type as a value sub-type in the
* collection.
*
* @return annotation of the UDT value
*/
@ -353,15 +433,21 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.SET data type in the schema with a specific
* UDT sub-type; Java type is @Set
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: add, addAll, remove and removeAll
* in @UpdateOperation.
*/
@Documented
@ -378,16 +464,22 @@ public final class Types {
}
/**
* Says to use the @DataType.Name.MAP data type in the schema with a specific
* simple sub-type as the key and a UDT sub-type as the value; Java type
* is @Map
*
* <p>
* Helenus does not allow the use of a specific implementation of the
* collection, therefore the result of a data retrieval operation can be a
* collection with another implementation.
*
* <p>
* This annotation is usually used only for sub-type clarification, and only
* when the sub-type is a Java type that corresponds to multiple Cassandra data
* types.
*
* <p>
* For this type there are special operations: put and putAll
* in @UpdateOperation.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@ -395,32 +487,39 @@ public final class Types {
public @interface UDTValueMap {
/**
* Clarification of the sub-type data type used in the collection. It supports
* only simple data types (not Collection, UDT or Tuple)
*
* <p>
* If you need a UDT key sub-type in the map, consider the @UDTMap annotation
*
* @return data type name of the key
*/
DataType.Name key();
/**
* Clarification of using the UDT data type as a value sub-type in the
* collection.
*
* @return annotation of the UDT value
*/
UDT value();
}
/**
* Says to use the @DataType.Name.UUID type in the schema; Java type is @UUID.
* Used by default.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Uuid {
}
/** Says to use the @DataType.Name.VARCHAR type in the schema; Java type is @String */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Varchar {
}
}
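A minimal sketch of a few of these clarifications on a table (illustrative names):

import com.datastax.driver.core.DataType;
import java.util.Map;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Types;

@Table
interface Page {
  @PartitionKey
  String url();

  // Store this String as ASCII instead of the default TEXT.
  @Types.Ascii
  String language();

  // Map with explicit simple sub-types for key and value.
  @Types.Map(key = DataType.Name.TEXT, value = DataType.Name.BIGINT)
  Map<String, Long> sectionHits();
}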
