Merge branch 'develop'

Greg Burd 2017-11-02 16:32:22 -04:00
commit 9eaa53c5f0
294 changed files with 15792 additions and 15114 deletions


@@ -3,7 +3,6 @@
   <component name="EclipseCodeFormatterProjectSettings">
     <option name="projectSpecificProfile">
       <ProjectSpecificProfile>
-        <option name="formatter" value="ECLIPSE" />
         <option name="pathToConfigFileJava" value="$PROJECT_DIR$/../newton/formatting/onshape-eclipse-general-preferences.epf" />
       </ProjectSpecificProfile>
     </option>

NOTES

@@ -1,172 +1,27 @@
Operation/
|-- AbstractStatementOperation
| |-- AbstractOperation
| | |-- AbstractFilterOperation
| | | |-- CountOperation
| | | |-- DeleteOperation
| | | `-- UpdateOperation
| | |-- BoundOperation
| | `-- InsertOperation
| |-- AbstractOptionalOperation
| | |-- AbstractFilterOptionalOperation
| | | |-- SelectFirstOperation
| | | `-- SelectFirstTransformingOperation
| | `-- BoundOptionalOperation
| `-- AbstractStreamOperation
| |-- AbstractFilterStreamOperation
| | |-- SelectOperation
| | `-- SelectTransformingOperation
| `-- BoundStreamOperation
|-- PreparedOperation
|-- PreparedOptionalOperation
`-- PreparedStreamOperation
--- Cache
// `E` is the type of the Entity class or one of:
// - ResultSet
// - ArrayTuple{N}
// - Count
// `F` is the type argument passed to us from the HelenusSession DSL and carried on via one of the
// Operation classes; it is going to be one of:
// - ResultSet
// - ArrayTuple{N}
// - or a type previously registered as a HelenusEntity.
// In the form of a:
// - Stream<?> or an
// - Optional<?>
//
// Operation/
// |-- AbstractStatementOperation
// | |-- AbstractOperation
// | | |-- AbstractFilterOperation
// | | | |-- CountOperation
// | | | |-- DeleteOperation
// | | | `-- UpdateOperation
// | | |-- BoundOperation
// | | `-- InsertOperation
// | |-- AbstractOptionalOperation
// | | |-- AbstractFilterOptionalOperation
// | | | |-- SelectFirstOperation
// | | | `-- SelectFirstTransformingOperation
// | | `-- BoundOptionalOperation
// | `-- AbstractStreamOperation
// | |-- AbstractFilterStreamOperation
// | | |-- SelectOperation
// | | `-- SelectTransformingOperation
// | `-- BoundStreamOperation
// |-- PreparedOperation
// |-- PreparedOptionalOperation
// `-- PreparedStreamOperation
//
// These all boil down to: Select, Update, Insert, Delete and Count
//
// -- Select:
// 1) Select statements that contain all primary key information will be "distinct" and
// result in a single value or no match.
// If present, return the cached entity; otherwise execute the query and cache the result.
//
// 2) Otherwise the result is a set, possibly empty, of values that match.
// When within a UOW:
// If present, return the cached value(s) from the statement cache matching the query string.
// Otherwise, execute the query, cache the result in the statement cache, and update/merge the
// entities into the entity cache.
// NOTE: When we read data from the database we augment the select clause with TTL and write
// timestamps for all columns that record such information so as to be able to properly expire
// and merge values in the cache.
//
// -- Update:
// Execute the database statement and then, iff successful, upsert the entity being updated into the
// entity cache.
//
// -- Insert/Upsert:
// Same as Update.
//
// -- Delete:
// Same as Update, but remove the cached value from all caches on success.
//
// -- Count:
// If operating within a UOW, look up the count in the statement cache; if not present, execute the query and cache the result.
//
if (delegate instanceof SelectOperation) {
SelectOperation<E> op = (SelectOperation<E>) delegate;
// Determine if we are caching and if so where.
AbstractCache<CacheKey, Set<E>> cache = delegate.getCache();
boolean prepareStatementForCaching = cache != null;
if (uow != null) {
prepareStatementForCaching = true;
cache = uow.<Set<E>>getCacheEnclosing(cache);
}
// The delegate will provide the cache key because it will either be:
// a) when distinct: the combination of the partition/cluster key columns
// b) otherwise: the table name followed by the portion of the SQL statement that would form the WHERE clause
CacheKey key = (cache == null) ? null : delegate.getCacheKey();
if (key != null && cache != null) {
Set<E> value = cache.get(key);
if (value != null) {
// Select will always return a Stream<E>
// TODO(gburd): SelectTransforming... apply fn here?
result = (E) value.stream();
if (cacheHitCounter != null) {
cacheHitCounter.inc();
}
if (log != null) {
log.info("cache hit");
}
return result;
} else {
if (cacheMissCounter != null) {
cacheMissCounter.inc();
}
if (log != null) {
log.info("cache miss");
}
}
}
}
if (cache != null) {
Object obj = delegate.unwrap(result);
if (obj != null) {
cache.put(key, obj);
}
delegate.<E>extract(result, key, cache);
}
}
}
// TODO: first, ask the delegate for the cacheKey
// if this is a SELECT query:
// if not in cache build the statement, execute the future, cache the result, transform the result then cache the transformations
// if INSERT/UPSERT/UPDATE
// if DELETE
// if COUNT
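
A rough sketch, in code, of the flow described in these notes. The StatementCache/EntityCache
types and method names here are stand-ins for illustration, not the actual Helenus API:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;

class CacheFlowSketch {
  static class StatementCache {
    private final Map<String, Object> byQuery = new HashMap<>();
    Optional<Object> lookup(String queryKey) { return Optional.ofNullable(byQuery.get(queryKey)); }
    void store(String queryKey, Object rows) { byQuery.put(queryKey, rows); }
  }

  static class EntityCache {
    private final Map<String, Object> byPrimaryKey = new HashMap<>();
    void upsert(String key, Object entity) { byPrimaryKey.put(key, entity); }
    void remove(String key) { byPrimaryKey.remove(key); }
  }

  // SELECT within a UOW: consult the statement cache first, hit the database on a miss,
  // then cache by statement and merge into the entity cache.
  Object select(StatementCache statements, EntityCache entities,
                String queryKey, String primaryKey, Supplier<Object> query) {
    return statements.lookup(queryKey).orElseGet(() -> {
      Object rows = query.get();           // execute the query
      statements.store(queryKey, rows);    // cache the result by statement
      entities.upsert(primaryKey, rows);   // merge into the entity cache
      return rows;
    });
  }

  // UPDATE/INSERT: write through, upsert into the entity cache iff the statement succeeded.
  void update(EntityCache entities, String primaryKey, Object entity, Supplier<Boolean> execute) {
    if (execute.get()) {
      entities.upsert(primaryKey, entity);
    }
  }

  // DELETE: same shape as update, but evict instead of upsert.
  void delete(EntityCache entities, String primaryKey, Supplier<Boolean> execute) {
    if (execute.get()) {
      entities.remove(primaryKey);
    }
  }
}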
----------------------------
@Override
public CacheKey getCacheKey() {
List<String> keys = new ArrayList<>(filters.size());
HelenusEntity entity = props.get(0).getEntity();
for (HelenusPropertyNode prop : props) {
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
Filter filter = filters.get(prop.getProperty());
if (filter != null) {
keys.add(filter.toString());
} else {
// we're missing a part of the primary key, so we can't create a proper cache key
return null;
}
break;
default:
// We're past the primary key components in this ordered list, so we're done building
// the cache key.
if (keys.size() > 0) {
return new CacheKey(entity, Joiner.on(",").join(keys));
}
return null;
}
}
return null;
}
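
For example, the two key shapes described in the comments above come out roughly like this
(the string formats are illustrative only, not the exact Helenus encoding):

class CacheKeyShapes {
  public static void main(String[] args) {
    // a) distinct select: every partition/clustering column is bound, so the key is
    //    built from the joined primary-key filters, as in getCacheKey() above.
    String distinctKey = "widget" + "." + String.join(",", "id == 42", "ts == 100");

    // b) non-distinct select: the table name plus the WHERE portion of the statement.
    String statementKey = "widget" + "." + "WHERE name = 'knob'";

    System.out.println(distinctKey);   // widget.id == 42,ts == 100
    System.out.println(statementKey);  // widget.WHERE name = 'knob'
  }
}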
---------------------------
@@ -175,64 +30,6 @@
// TODO(gburd): create a statement that matches one that wasn't prepared
//String key =
//  "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString();
//}
------------------------
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
public abstract class AbstractCache<K, V> {
final Logger logger = LoggerFactory.getLogger(getClass());
public Cache<K, V> cache;
public AbstractCache() {
RemovalListener<K, V> listener =
new RemovalListener<K, V>() {
@Override
public void onRemoval(RemovalNotification<K, V> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
logger.info(cause);
}
}
};
cache = CacheBuilder.newBuilder()
.maximumSize(10_000)
.expireAfterAccess(20, TimeUnit.MINUTES)
.weakKeys()
.softValues()
.removalListener(listener)
.build();
}
V get(K key) {
return cache.getIfPresent(key);
}
void put(K key, V value) {
cache.put(key, value);
}
}
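
Usage of the Guava cache built above is straightforward; one caveat worth noting is that
CacheBuilder.weakKeys() switches the cache to identity (==) key comparison, so two equal
but distinct CacheKey instances would not hit the same entry. A small demo, with weakKeys()
deliberately omitted so lookups use equals():

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

class GuavaCacheDemo {
  public static void main(String[] args) {
    // Same sizing/expiry settings as AbstractCache above, minus weakKeys().
    Cache<String, Object> cache =
        CacheBuilder.newBuilder()
            .maximumSize(10_000)
            .expireAfterAccess(20, TimeUnit.MINUTES)
            .softValues()
            .build();

    cache.put("widget.WHERE id = 42", "cached result");
    System.out.println(cache.getIfPresent("widget.WHERE id = 42")); // cached result
  }
}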
------------------------------------------------------------------------------------------------
cache entites (2 methods) marked @Cacheable
cache entites in txn context
cache results when .cache() chained before .{a}sync() call, return an EvictableCacheItem<E> that has an .evict() method (see the sketch below)
fix txn .andThen() chains
primitive types have default values (e.g. boolean, int, ...) but primitive wrapper classes do not and can be null (e.g. Boolean, Integer, ...)
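
The EvictableCacheItem mentioned above doesn't exist yet; a hypothetical sketch of that
interface might look like:

public interface EvictableCacheItem<E> {
  /** The value produced by the .cache()....{a}sync() chain. */
  E get();

  /** Drop this entry from the cache so the next read goes back to the database. */
  void evict();
}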
@@ -372,3 +169,17 @@ begin:
cache.put
}
*/
------------------
InsertOperation
Class<?> iface = entity.getMappingInterface();
boolean includesNonIdentityValues = values.stream().map(t -> {
ColumnType type = t._1.getProperty().getColumnType();
return !((type == ColumnType.PARTITION_KEY) || (type == ColumnType.CLUSTERING_COLUMN));
})
.reduce(false, (acc, t) -> acc || t);
if (resultType == iface) {
if (values.size() > 0 && includesNonIdentityValues) {
boolean immutable = iface.isAssignableFrom(Drafted.class);
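
(Aside: the map/reduce above computes "any non-identity column present"; it could be written
with the short-circuiting anyMatch instead, reusing the same `values` list and ColumnType
enum from the snippet:)

boolean includesNonIdentityValues =
    values.stream()
        .anyMatch(t -> {
          ColumnType type = t._1.getProperty().getColumnType();
          return type != ColumnType.PARTITION_KEY && type != ColumnType.CLUSTERING_COLUMN;
        });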


@@ -15,9 +15,8 @@
  */
 package com.datastax.driver.core.querybuilder;
 
-import java.util.List;
 import com.datastax.driver.core.CodecRegistry;
+import java.util.List;
 
 public class IsNotNullClause extends Clause {


@@ -16,8 +16,10 @@ public class CreateCustomIndex extends CreateIndex {
   CreateCustomIndex(String indexName) {
     super(indexName);
     validateNotEmpty(indexName, "Index name");
-    validateNotKeyWord(indexName,
-        String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
+    validateNotKeyWord(
+        indexName,
+        String.format(
+            "The index name '%s' is not allowed because it is a reserved keyword", indexName));
     this.indexName = indexName;
   }

@@ -34,20 +36,22 @@ public class CreateCustomIndex extends CreateIndex {
   /**
    * Specify the keyspace and table to create the index on.
    *
-   * @param keyspaceName
-   *          the keyspace name.
-   * @param tableName
-   *          the table name.
-   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
-   *         of the column.
+   * @param keyspaceName the keyspace name.
+   * @param tableName the table name.
+   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
    */
   public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
     validateNotEmpty(keyspaceName, "Keyspace name");
     validateNotEmpty(tableName, "Table name");
-    validateNotKeyWord(keyspaceName,
-        String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
-    validateNotKeyWord(tableName,
-        String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+    validateNotKeyWord(
+        keyspaceName,
+        String.format(
+            "The keyspace name '%s' is not allowed because it is a reserved keyword",
+            keyspaceName));
+    validateNotKeyWord(
+        tableName,
+        String.format(
+            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
     this.keyspaceName = Optional.fromNullable(keyspaceName);
     this.tableName = tableName;
     return new CreateCustomIndex.CreateIndexOn();

@@ -56,15 +60,15 @@ public class CreateCustomIndex extends CreateIndex {
   /**
    * Specify the table to create the index on.
    *
-   * @param tableName
-   *          the table name.
-   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
-   *         of the column.
+   * @param tableName the table name.
+   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
    */
   public CreateIndex.CreateIndexOn onTable(String tableName) {
     validateNotEmpty(tableName, "Table name");
-    validateNotKeyWord(tableName,
-        String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+    validateNotKeyWord(
+        tableName,
+        String.format(
+            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
     this.tableName = tableName;
     return new CreateCustomIndex.CreateIndexOn();
   }

@@ -79,7 +83,8 @@ public class CreateCustomIndex extends CreateIndex {
 
   @Override
   public String buildInternal() {
-    StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
+    StringBuilder createStatement =
+        new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
 
     if (ifNotExists) {
       createStatement.append("IF NOT EXISTS ");

@@ -117,14 +122,15 @@ public class CreateCustomIndex extends CreateIndex {
     /**
      * Specify the column to create the index on.
      *
-     * @param columnName
-     *          the column name.
+     * @param columnName the column name.
      * @return the final CREATE INDEX statement.
      */
     public SchemaStatement andColumn(String columnName) {
       validateNotEmpty(columnName, "Column name");
-      validateNotKeyWord(columnName,
-          String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+      validateNotKeyWord(
+          columnName,
+          String.format(
+              "The column name '%s' is not allowed because it is a reserved keyword", columnName));
       CreateCustomIndex.this.columnName = columnName;
       return SchemaStatement.fromQueryString(buildInternal());
     }

@@ -132,14 +138,15 @@ public class CreateCustomIndex extends CreateIndex {
     /**
      * Create an index on the keys of the given map column.
      *
-     * @param columnName
-     *          the column name.
+     * @param columnName the column name.
      * @return the final CREATE INDEX statement.
      */
     public SchemaStatement andKeysOfColumn(String columnName) {
       validateNotEmpty(columnName, "Column name");
-      validateNotKeyWord(columnName,
-          String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+      validateNotKeyWord(
+          columnName,
+          String.format(
+              "The column name '%s' is not allowed because it is a reserved keyword", columnName));
       CreateCustomIndex.this.columnName = columnName;
       CreateCustomIndex.this.keys = true;
       return SchemaStatement.fromQueryString(buildInternal());


@@ -10,7 +10,11 @@ public class CreateMaterializedView extends Create {
   private String primaryKey;
   private String clustering;
 
-  public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
+  public CreateMaterializedView(
+      String keyspaceName,
+      String viewName,
+      Select.Where selection,
+      String primaryKey,
       String clustering) {
     super(keyspaceName, viewName);
     this.viewName = viewName;

@@ -24,7 +28,8 @@ public class CreateMaterializedView extends Create {
   }
 
   public String buildInternal() {
-    StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
+    StringBuilder createStatement =
+        new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
     if (ifNotExists) {
       createStatement.append(" IF NOT EXISTS");
     }


@@ -11,7 +11,8 @@ public class CreateSasiIndex extends CreateCustomIndex {
   }
 
   String getOptions() {
-    return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
+    return "'analyzer_class': "
+        + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
         + "'case_sensitive': 'false'";
   }
 }


@@ -4,10 +4,10 @@ import com.google.common.base.Optional;
 
 public class DropMaterializedView extends Drop {
 
-  private final String itemType = "MATERIALIZED VIEW";
   private Optional<String> keyspaceName = Optional.absent();
   private String itemName;
   private boolean ifExists = true;
 
   public DropMaterializedView(String keyspaceName, String viewName) {
     this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
   }

@@ -31,7 +31,7 @@ public class DropMaterializedView extends Drop {
 
   @Override
   public String buildInternal() {
-    StringBuilder dropStatement = new StringBuilder("DROP " + itemType + " ");
+    StringBuilder dropStatement = new StringBuilder("DROP MATERIALIZED VIEW ");
     if (ifExists) {
       dropStatement.append("IF EXISTS ");
     }

@@ -44,6 +44,9 @@ public class DropMaterializedView extends Drop {
   }
 
   enum DroppedItem {
-    TABLE, TYPE, INDEX, MATERIALIZED_VIEW
+    TABLE,
+    TYPE,
+    INDEX,
+    MATERIALIZED_VIEW
   }
 }


@@ -17,7 +17,6 @@ package net.helenus.config;
 
 import java.lang.reflect.Method;
 import java.util.function.Function;
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;
 import net.helenus.core.reflect.ReflectionDslInstantiator;


@@ -18,7 +18,6 @@ package net.helenus.config;
 
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.function.Function;
 import net.helenus.mapping.annotation.Transient;
 
 public enum GetterMethodDetector implements Function<Method, Boolean> {


@@ -17,7 +17,6 @@ package net.helenus.config;
 
 import java.lang.reflect.Method;
 import java.util.function.Function;
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;


@@ -3,7 +3,6 @@ package net.helenus.core;
 
 import java.time.LocalDateTime;
 import java.time.ZoneId;
 import java.util.Date;
 import net.helenus.core.reflect.MapExportable;
 
 public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {


@@ -1,16 +1,13 @@
 package net.helenus.core;
 
+import com.google.common.primitives.Primitives;
 import java.io.Serializable;
 import java.util.*;
-
-import org.apache.commons.lang3.SerializationUtils;
-
-import com.google.common.primitives.Primitives;
-
 import net.helenus.core.reflect.DefaultPrimitiveTypes;
 import net.helenus.core.reflect.Drafted;
 import net.helenus.core.reflect.MapExportable;
 import net.helenus.mapping.MappingUtil;
+import org.apache.commons.lang3.SerializationUtils;
 
 public abstract class AbstractEntityDraft<E> implements Drafted<E> {

@@ -150,8 +147,8 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
     Map<String, Object> combined;
     if (entityMap != null && entityMap.size() > 0) {
       combined = new HashMap<String, Object>(entityMap.size());
-      for (String key : entityMap.keySet()) {
-        combined.put(key, entityMap.get(key));
+      for (Map.Entry<String, Object> e : entityMap.entrySet()) {
+        combined.put(e.getKey(), e.getValue());
       }
     } else {
       combined = new HashMap<String, Object>(backingMap.size());


@@ -15,26 +15,23 @@
  */
 package net.helenus.core;
 
-import java.io.PrintStream;
-import java.util.List;
-import java.util.concurrent.Executor;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import brave.Tracer;
 import com.codahale.metrics.MetricRegistry;
 import com.datastax.driver.core.*;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.Table;
 import com.google.common.util.concurrent.ListenableFuture;
-
-import brave.Tracer;
-
+import java.io.PrintStream;
+import java.util.List;
+import java.util.concurrent.Executor;
 import net.helenus.core.cache.Facet;
 import net.helenus.core.operation.Operation;
 import net.helenus.mapping.value.ColumnValuePreparer;
 import net.helenus.mapping.value.ColumnValueProvider;
 import net.helenus.support.Either;
 import net.helenus.support.HelenusException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class AbstractSessionOperations {

@@ -90,7 +87,8 @@ public abstract class AbstractSessionOperations {
     return execute(statement, uow, null, showValues);
   }
 
-  public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
+  public ResultSet execute(
+      Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
     return executeAsync(statement, uow, timer, showValues).getUninterruptibly();
   }

@@ -106,7 +104,8 @@ public abstract class AbstractSessionOperations {
     return executeAsync(statement, uow, null, showValues);
   }
 
-  public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
+  public ResultSetFuture executeAsync(
+      Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
     try {
       logStatement(statement, showValues);
       return currentSession().executeAsync(statement);

@@ -118,7 +117,7 @@ public abstract class AbstractSessionOperations {
   private void logStatement(Statement statement, boolean showValues) {
     if (isShowCql()) {
       printCql(Operation.queryString(statement, showValues));
-    } else if (LOG.isInfoEnabled()) {
+    } else if (LOG.isDebugEnabled()) {
       LOG.info("CQL> " + Operation.queryString(statement, showValues));
     }
   }

@@ -131,8 +130,7 @@ public abstract class AbstractSessionOperations {
     return null;
   }
 
-  public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
-  }
+  public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {}
 
   RuntimeException translateException(RuntimeException e) {
     if (e instanceof HelenusException) {

@@ -145,13 +143,11 @@ public abstract class AbstractSessionOperations {
     return null;
   }
 
-  public void updateCache(Object pojo, List<Facet> facets) {
-  }
+  public void updateCache(Object pojo, List<Facet> facets) {}
 
   void printCql(String cql) {
     getPrintStream().println(cql);
   }
 
-  public void cacheEvict(List<Facet> facets) {
-  }
+  public void cacheEvict(List<Facet> facets) {}
 }


@@ -17,25 +17,23 @@ package net.helenus.core;
 
 import static net.helenus.core.HelenusSession.deleted;
 
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.diffplug.common.base.Errors;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.HashBasedTable;
 import com.google.common.collect.Table;
 import com.google.common.collect.TreeTraverser;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 import net.helenus.core.cache.CacheUtil;
 import net.helenus.core.cache.Facet;
 import net.helenus.support.Either;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** Encapsulates the concept of a "transaction" as a unit-of-work. */
-public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {
+public abstract class AbstractUnitOfWork<E extends Exception>
+    implements UnitOfWork<E>, AutoCloseable {
 
   private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);

@@ -45,6 +43,7 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
   private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
   protected String purpose;
   protected List<String> nestedPurposes = new ArrayList<String>();
+  protected String info;
   protected int cacheHits = 0;
   protected int cacheMisses = 0;
   protected int databaseLookups = 0;

@@ -104,6 +103,11 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
     return this;
   }
 
+  @Override
+  public void setInfo(String info) {
+    this.info = info;
+  }
+
   @Override
   public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
     if (cache > 0) {

@@ -124,32 +128,53 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
     String database = "";
     if (databaseTime.size() > 0) {
       List<String> dbt = new ArrayList<>(databaseTime.size());
-      for (String name : databaseTime.keySet()) {
-        double t = databaseTime.get(name) / 1000.0;
+      for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
+        double t = dt.getValue() / 1000.0;
         d += t;
-        dbt.add(String.format("%s took %,.3fms %,2.2f%%", name, t, (t / e) * 100.0));
+        dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
       }
       double fd = (d / e) * 100.0;
-      database = String.format(", %d quer%s (%,.3fms %,2.2f%% - %s)", databaseLookups,
-          (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
+      database =
+          String.format(
+              ", %d quer%s (%,.3fms %,2.2f%% - %s)",
+              databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
     }
     String cache = "";
     if (cacheLookupTime > 0) {
       int cacheLookups = cacheHits + cacheMisses;
-      cache = String.format(" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)", cacheLookups,
-          cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
+      cache =
+          String.format(
+              " with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
+              cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
     }
     String da = "";
     if (databaseTime.size() > 0 || cacheLookupTime > 0) {
       double dat = d + c;
       double daf = (dat / e) * 100;
-      da = String.format(" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
+      da =
+          String.format(
+              " consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
     }
     String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
-    String n = nested.stream().map(uow -> String.valueOf(uow.hashCode())).collect(Collectors.joining(", "));
-    String s = String.format(Locale.US, "UOW(%s%s) %s in %,.3fms%s%s%s%s%s", hashCode(),
-        (nested.size() > 0 ? ", [" + n + "]" : ""), what, e, cache, database, da,
-        (purpose == null ? "" : " " + purpose), (nestedPurposes.isEmpty()) ? "" : ", " + x);
+    String n =
+        nested
+            .stream()
+            .map(uow -> String.valueOf(uow.hashCode()))
+            .collect(Collectors.joining(", "));
+    String s =
+        String.format(
+            Locale.US,
+            "UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
+            hashCode(),
+            (nested.size() > 0 ? ", [" + n + "]" : ""),
+            what,
+            e,
+            cache,
+            database,
+            da,
+            (purpose == null ? "" : " " + purpose),
+            (nestedPurposes.isEmpty()) ? "" : ", " + x,
+            (info == null) ? "" : " " + info);
     return s;
   }

@@ -208,7 +233,10 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
     }
     // look for other row/col pairs that referenced the same object, mark them
    // `deleted`
-    cache.columnKeySet().forEach(columnKey -> {
+    cache
+        .columnKeySet()
+        .forEach(
+            columnKey -> {
               Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
               if (eitherCachedValue.isLeft()) {
                 Object cachedValue = eitherCachedValue.getLeft();

@@ -239,19 +267,17 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
   }
 
   /**
-   * Checks to see if the work performed between calling begin and now can be
-   * committed or not.
+   * Checks to see if the work performed between calling begin and now can be committed or not.
    *
-   * @return a function from which to chain work that only happens when commit is
-   *         successful
-   * @throws E
-   *           when the work overlaps with other concurrent writers.
+   * @return a function from which to chain work that only happens when commit is successful
+   * @throws E when the work overlaps with other concurrent writers.
    */
   public PostCommitFunction<Void, Void> commit() throws E {
     // All nested UnitOfWork should be committed (not aborted) before calls to
     // commit, check.
     boolean canCommit = true;
-    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
+    TreeTraverser<AbstractUnitOfWork<E>> traverser =
+        TreeTraverser.using(node -> node::getChildNodes);
     for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
       if (this != uow) {
         canCommit &= (!uow.aborted && uow.committed);

@@ -274,7 +300,10 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
 
     if (parent == null) {
       // Apply all post-commit functions, this is the outter-most UnitOfWork.
-      traverser.postOrderTraversal(this).forEach(uow -> {
-        uow.applyPostCommitFunctions();
-      });
+      traverser
+          .postOrderTraversal(this)
+          .forEach(
+              uow -> {
+                uow.applyPostCommitFunctions();
+              });

@@ -293,12 +322,13 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
       parent.cacheMisses += cacheMisses;
       parent.databaseLookups += databaseLookups;
       parent.cacheLookupTime += cacheLookupTime;
-      for (String name : databaseTime.keySet()) {
+      for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
+        String name = dt.getKey();
         if (parent.databaseTime.containsKey(name)) {
           double t = parent.databaseTime.get(name);
-          parent.databaseTime.put(name, t + databaseTime.get(name));
+          parent.databaseTime.put(name, t + dt.getValue());
        } else {
-          parent.databaseTime.put(name, databaseTime.get(name));
+          parent.databaseTime.put(name, dt.getValue());
        }
      }
    }

@@ -312,8 +342,12 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
 
   /* Explicitly discard the work and mark it as as such in the log. */
   public synchronized void abort() {
-    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
-    traverser.postOrderTraversal(this).forEach(uow -> {
-      uow.committed = false;
-      uow.aborted = true;
-    });
+    TreeTraverser<AbstractUnitOfWork<E>> traverser =
+        TreeTraverser.using(node -> node::getChildNodes);
+    traverser
+        .postOrderTraversal(this)
+        .forEach(
+            uow -> {
+              uow.committed = false;
+              uow.aborted = true;
+            });

@@ -329,11 +363,19 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
 
   private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
     Table<String, String, Either<Object, List<Facet>>> to = this.cache;
-    from.rowMap().forEach((rowKey, columnMap) -> {
-      columnMap.forEach((columnKey, value) -> {
+    from.rowMap()
+        .forEach(
+            (rowKey, columnMap) -> {
+              columnMap.forEach(
+                  (columnKey, value) -> {
                     if (to.contains(rowKey, columnKey)) {
                       // TODO(gburd):...
-                      to.put(rowKey, columnKey, Either.left(CacheUtil.merge(to.get(rowKey, columnKey).getLeft(),
-                          from.get(rowKey, columnKey).getLeft())));
+                      to.put(
+                          rowKey,
+                          columnKey,
+                          Either.left(
+                              CacheUtil.merge(
+                                  to.get(rowKey, columnKey).getLeft(),
+                                  from.get(rowKey, columnKey).getLeft())));
                     } else {
                       to.put(rowKey, columnKey, from.get(rowKey, columnKey));

@@ -16,5 +16,8 @@
 package net.helenus.core;
 
 public enum AutoDdl {
-  VALIDATE, UPDATE, CREATE, CREATE_DROP;
+  VALIDATE,
+  UPDATE,
+  CREATE,
+  CREATE_DROP;
 }


@@ -15,13 +15,15 @@
  */
 package net.helenus.core;
 
-import java.util.Optional;
-
 import com.datastax.driver.core.Metadata;
+import java.util.Optional;
 import net.helenus.core.reflect.HelenusPropertyNode;
 
 public interface DslInstantiator {
 
-  <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
+  <E> E instantiate(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
+      Metadata metadata);
 }


@@ -15,10 +15,8 @@
  */
 package net.helenus.core;
 
-import java.util.Objects;
-
 import com.datastax.driver.core.querybuilder.Clause;
-
+import java.util.Objects;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.MappingUtil;
 import net.helenus.mapping.value.ColumnValuePreparer;

@@ -87,7 +85,8 @@ public final class Filter<V> {
     Objects.requireNonNull(val, "empty value");
 
     if (op == Operator.IN) {
-      throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
+      throw new IllegalArgumentException(
+          "invalid usage of the 'in' operator, use Filter.in() static method");
     }
 
     HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);


@@ -15,17 +15,15 @@
  */
 package net.helenus.core;
 
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Metadata;
+import com.datastax.driver.core.Session;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Metadata;
-import com.datastax.driver.core.Session;
-
 import net.helenus.config.DefaultHelenusSettings;
 import net.helenus.config.HelenusSettings;
 import net.helenus.core.reflect.DslExportable;

@@ -35,14 +33,15 @@ import net.helenus.support.HelenusMappingException;
 
 public final class Helenus {
 
-  private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
-  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
+  private static final ConcurrentMap<Class<?>, Object> dslCache =
+      new ConcurrentHashMap<Class<?>, Object>();
+  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
+      new ConcurrentHashMap<Class<?>, Metadata>();
   private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
   private static volatile HelenusSettings settings = new DefaultHelenusSettings();
   private static volatile HelenusSession singleton;
 
-  private Helenus() {
-  }
+  private Helenus() {}
 
   protected static void setSession(HelenusSession session) {
     sessions.add(session);

@@ -54,7 +53,8 @@ public final class Helenus {
   }
 
   public static void shutdown() {
-    sessions.forEach((session) -> {
-      session.close();
-      sessions.remove(session);
-    });
+    sessions.forEach(
+        (session) -> {
+          session.close();
+          sessions.remove(session);
+        });

@@ -106,7 +106,10 @@ public final class Helenus {
     return dsl(iface, classLoader, Optional.empty(), metadata);
   }
 
-  public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
+  public static <E> E dsl(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
       Metadata metadata) {
 
     Object instance = null;


@@ -17,6 +17,10 @@ package net.helenus.core;
 
 import static net.helenus.core.Query.eq;
 
+import brave.Tracer;
+import com.codahale.metrics.MetricRegistry;
+import com.datastax.driver.core.*;
+import com.google.common.collect.Table;
 import java.io.Closeable;
 import java.io.PrintStream;
 import java.lang.reflect.Constructor;

@@ -24,16 +28,9 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.*;
 import java.util.concurrent.Executor;
 import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.codahale.metrics.MetricRegistry;
-import com.datastax.driver.core.*;
-import com.google.common.collect.Table;
-
-import brave.Tracer;
-
 import net.helenus.core.cache.CacheUtil;
 import net.helenus.core.cache.Facet;
 import net.helenus.core.cache.SessionCache;

@@ -50,11 +47,15 @@ import net.helenus.support.*;
 import net.helenus.support.Fun.Tuple1;
 import net.helenus.support.Fun.Tuple2;
 import net.helenus.support.Fun.Tuple6;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public final class HelenusSession extends AbstractSessionOperations implements Closeable {
+public class HelenusSession extends AbstractSessionOperations implements Closeable {
 
   public static final Object deleted = new Object();
   private static final Logger LOG = LoggerFactory.getLogger(HelenusSession.class);
+  private static final Pattern classNameRegex =
+      Pattern.compile("^(?:\\w+\\.)+(?:(\\w+)|(\\w+)\\$.*)$");
 
   private final Session session;
   private final CodecRegistry registry;

@@ -74,15 +75,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   private volatile String usingKeyspace;
   private volatile boolean showCql;
 
-  HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
-      PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
-      boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, boolean defaultQueryIdempotency,
-      Class<? extends UnitOfWork> unitOfWorkClass, SessionCache sessionCache, MetricRegistry metricRegistry,
+  HelenusSession(
+      Session session,
+      String usingKeyspace,
+      CodecRegistry registry,
+      boolean showCql,
+      PrintStream printStream,
+      SessionRepositoryBuilder sessionRepositoryBuilder,
+      Executor executor,
+      boolean dropSchemaOnClose,
+      ConsistencyLevel consistencyLevel,
+      boolean defaultQueryIdempotency,
+      Class<? extends UnitOfWork> unitOfWorkClass,
+      SessionCache sessionCache,
+      MetricRegistry metricRegistry,
       Tracer tracer) {
     this.session = session;
     this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
-    this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
-        "keyspace needs to be selected before creating session");
+    this.usingKeyspace =
+        Objects.requireNonNull(
+            usingKeyspace, "keyspace needs to be selected before creating session");
     this.showCql = showCql;
     this.printStream = printStream;
     this.sessionRepository = sessionRepositoryBuilder.build();

@@ -207,7 +219,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
 
   @Override
   public void updateCache(Object pojo, List<Facet> facets) {
-    Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
+    Map<String, Object> valueMap =
+        pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
     List<Facet> boundFacets = new ArrayList<>();
     for (Facet facet : facets) {
       if (facet instanceof UnboundFacet) {

@@ -234,28 +247,40 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     }
     String tableName = CacheUtil.schemaName(facets);
     List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
-    mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
+    replaceCachedFacetValues(pojo, tableName, facetCombinations);
   }
 
   @Override
   public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
-    List<Object> items = uowCache.values().stream().filter(Either::isLeft).map(Either::getLeft).distinct()
-        .collect(Collectors.toList());
+    List<Object> items =
+        uowCache
+            .values()
+            .stream()
+            .filter(Either::isLeft)
+            .map(Either::getLeft)
+            .distinct()
+            .collect(Collectors.toList());
     for (Object pojo : items) {
       HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
-      Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
+      Map<String, Object> valueMap =
+          pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
       if (entity.isCacheable()) {
         List<Facet> boundFacets = new ArrayList<>();
         for (Facet facet : entity.getFacets()) {
           if (facet instanceof UnboundFacet) {
             UnboundFacet unboundFacet = (UnboundFacet) facet;
             UnboundFacet.Binder binder = unboundFacet.binder();
-            unboundFacet.getProperties().forEach(prop -> {
-              if (valueMap == null) {
-                Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
-                binder.setValueForProperty(prop, value.toString());
-              } else {
-                binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
-              }
-            });
+            unboundFacet
+                .getProperties()
+                .forEach(
+                    prop -> {
+                      if (valueMap == null) {
+                        Object value =
+                            BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
+                        binder.setValueForProperty(prop, value.toString());
+                      } else {
+                        binder.setValueForProperty(
+                            prop, valueMap.get(prop.getPropertyName()).toString());
+                      }
+                    });
             if (binder.isBound()) {

@@ -265,14 +290,18 @@ public final class HelenusSession extends AbstractSessionOperations implements C
             boundFacets.add(facet);
           }
         }
+        // NOTE: should equal `String tableName = CacheUtil.schemaName(facets);`
         List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
         String tableName = CacheUtil.schemaName(boundFacets);
-        mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
+        replaceCachedFacetValues(pojo, tableName, facetCombinations);
       }
     }
 
-    List<List<Facet>> deletedFacetSets = uowCache.values().stream().filter(Either::isRight).map(Either::getRight)
-        .collect(Collectors.toList());
+    List<List<Facet>> deletedFacetSets =
+        uowCache
+            .values()
+            .stream()
+            .filter(Either::isRight)
+            .map(Either::getRight)
+            .collect(Collectors.toList());
     for (List<Facet> facets : deletedFacetSets) {
       String tableName = CacheUtil.schemaName(facets);

@@ -284,21 +313,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     }
   }
 
-  private void mergeAndUpdateCacheValues(Object pojo, String tableName, List<String[]> facetCombinations) {
-    Object merged = null;
+  private void replaceCachedFacetValues(
+      Object pojo, String tableName, List<String[]> facetCombinations) {
     for (String[] combination : facetCombinations) {
       String cacheKey = tableName + "." + Arrays.toString(combination);
-      Object value = sessionCache.get(cacheKey);
-      if (value == null) {
-        sessionCache.put(cacheKey, pojo);
-      } else {
-        if (merged == null) {
-          merged = pojo;
-        } else {
-          merged = CacheUtil.merge(value, pojo);
-        }
-        sessionCache.put(cacheKey, merged);
-      }
+      sessionCache.invalidate(cacheKey);
+      sessionCache.put(cacheKey, pojo);
     }
   }

@@ -310,43 +330,77 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return this.begin(null);
   }
 
+  private String extractClassNameFromStackFrame(String classNameOnStack) {
+    String name = null;
+    Matcher m = classNameRegex.matcher(classNameOnStack);
+    if (m.find()) {
+      name = (m.group(1) != null) ? m.group(1) : ((m.group(2) != null) ? m.group(2) : name);
+    } else {
+      name = classNameOnStack;
+    }
+    return name;
+  }
+
   public synchronized UnitOfWork begin(UnitOfWork parent) {
     try {
       Class<? extends UnitOfWork> clazz = unitOfWorkClass;
-      Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
+      Constructor<? extends UnitOfWork> ctor =
+          clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
       UnitOfWork uow = ctor.newInstance(this, parent);
       if (LOG.isInfoEnabled() && uow.getPurpose() == null) {
         StringBuilder purpose = null;
+        int frame = 0;
         StackTraceElement[] trace = Thread.currentThread().getStackTrace();
-        int frame = 2;
-        if (trace[2].getMethodName().equals("begin")) {
-          frame = 3;
-        } else if (trace[2].getClassName().equals(unitOfWorkClass.getName())) {
-          frame = 3;
-        }
-        purpose = new StringBuilder().append(trace[frame].getClassName()).append(".")
-            .append(trace[frame].getMethodName()).append("(").append(trace[frame].getFileName()).append(":")
-            .append(trace[frame].getLineNumber()).append(")");
-        uow.setPurpose(purpose.toString());
+        String targetClassName = HelenusSession.class.getSimpleName();
+        String stackClassName = null;
+        do {
+          frame++;
+          stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
+        } while (!stackClassName.equals(targetClassName) && frame < trace.length);
+        do {
+          frame++;
+          stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
+        } while (stackClassName.equals(targetClassName) && frame < trace.length);
+        if (frame < trace.length) {
+          purpose =
+              new StringBuilder()
+                  .append(trace[frame].getClassName())
+                  .append(".")
+                  .append(trace[frame].getMethodName())
+                  .append("(")
+                  .append(trace[frame].getFileName())
+                  .append(":")
+                  .append(trace[frame].getLineNumber())
+                  .append(")");
+          uow.setPurpose(purpose.toString());
+        }
       }
       if (parent != null) {
         parent.addNestedUnitOfWork(uow);
       }
       return uow.begin();
-    } catch (NoSuchMethodException | InvocationTargetException | InstantiationException
+    } catch (NoSuchMethodException
+        | InvocationTargetException
+        | InstantiationException
         | IllegalAccessException e) {
       throw new HelenusException(
-          String.format("Unable to instantiate {} as a UnitOfWork.", unitOfWorkClass.getSimpleName()), e);
+          String.format(
+              "Unable to instantiate %s as a UnitOfWork.", unitOfWorkClass.getSimpleName()),
+          e);
     }
   }
 
   public <E> SelectOperation<E> select(E pojo) {
-    Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
+    Objects.requireNonNull(
+        pojo, "supplied object must be a dsl for a registered entity but cannot be null");
     ColumnValueProvider valueProvider = getValueProvider();
     HelenusEntity entity = Helenus.resolve(pojo);
     Class<?> entityClass = entity.getMappingInterface();
 
-    return new SelectOperation<E>(this, entity, (r) -> {
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
           Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
           return (E) Helenus.map(entityClass, map);
         });

@@ -357,7 +411,10 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     ColumnValueProvider valueProvider = getValueProvider();
     HelenusEntity entity = Helenus.entity(entityClass);
 
-    return new SelectOperation<E>(this, entity, (r) -> {
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
           Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
           return (E) Helenus.map(entityClass, map);
         });

@@ -373,7 +430,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   }
 
   public <E> SelectOperation<Row> selectAll(E pojo) {
-    Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
+    Objects.requireNonNull(
+        pojo, "supplied object must be a dsl for a registered entity but cannot be null");
     HelenusEntity entity = Helenus.resolve(pojo);
     return new SelectOperation<Row>(this, entity);
   }

@@ -388,7 +446,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     Objects.requireNonNull(getter1, "field 1 is empty");
 
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
-    return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
+    return new SelectOperation<Tuple1<V1>>(
+        this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
   }
 
   public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {

@@ -397,12 +456,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
 
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
-    return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
-        p1, p2);
+    return new SelectOperation<Fun.Tuple2<V1, V2>>(
+        this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
   }
 
-  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3) {
+  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -410,12 +469,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
 
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
-    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
-        new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
+    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(
+        this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
   }
 
-  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3, Getter<V4> getter4) {
+  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -425,12 +484,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
-    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
-        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
+    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(
+        this,
+        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4),
+        p1,
+        p2,
+        p3,
+        p4);
   }
 
-  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
-      Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
+  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
+      Getter<V1> getter1,
+      Getter<V2> getter2,
+      Getter<V3> getter3,
+      Getter<V4> getter4,
+      Getter<V5> getter5) {
     Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty"); Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty"); Objects.requireNonNull(getter3, "field 3 is empty");
@ -442,12 +510,23 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3); HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4); HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5); HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this, return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5); this,
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
p1,
p2,
p3,
p4,
p5);
} }
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1, public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) { Getter<V1> getter1,
Getter<V2> getter2,
Getter<V3> getter3,
Getter<V4> getter4,
Getter<V5> getter5,
Getter<V6> getter6) {
Objects.requireNonNull(getter1, "field 1 is empty"); Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty"); Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty"); Objects.requireNonNull(getter3, "field 3 is empty");
@ -461,14 +540,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4); HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5); HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6); HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this, return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4, this,
p5, p6); new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
p1,
p2,
p3,
p4,
p5,
p6);
} }
public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select( public <V1, V2, V3, V4, V5, V6, V7>
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
Getter<V6> getter6, Getter<V7> getter7) { Getter<V1> getter1,
Getter<V2> getter2,
Getter<V3> getter3,
Getter<V4> getter4,
Getter<V5> getter5,
Getter<V6> getter6,
Getter<V7> getter7) {
Objects.requireNonNull(getter1, "field 1 is empty"); Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty"); Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty"); Objects.requireNonNull(getter3, "field 3 is empty");
@ -484,9 +575,17 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5); HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6); HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7); HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this, return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2, this,
p3, p4, p5, p6, p7); new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
getValueProvider(), p1, p2, p3, p4, p5, p6, p7),
p1,
p2,
p3,
p4,
p5,
p6,
p7);
} }
public CountOperation count() { public CountOperation count() {
@ -522,7 +621,10 @@ public final class HelenusSession extends AbstractSessionOperations implements C
HelenusEntity entity = Helenus.entity(draft.getEntityClass()); HelenusEntity entity = Helenus.entity(draft.getEntityClass());
// Add all the mutated values contained in the draft. // Add all the mutated values contained in the draft.
entity.getOrderedProperties().forEach(property -> { entity
.getOrderedProperties()
.forEach(
property -> {
switch (property.getColumnType()) { switch (property.getColumnType()) {
case PARTITION_KEY: case PARTITION_KEY:
case CLUSTERING_COLUMN: case CLUSTERING_COLUMN:
@ -531,10 +633,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
String propertyName = property.getPropertyName(); String propertyName = property.getPropertyName();
if (mutatedProperties.contains(propertyName)) { if (mutatedProperties.contains(propertyName)) {
Object value = map.get(propertyName); Object value = map.get(propertyName);
Getter<Object> getter = new Getter<Object>() { Getter<Object> getter =
new Getter<Object>() {
@Override @Override
public Object get() { public Object get() {
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty())); throw new DslPropertyException(
new HelenusPropertyNode(property, Optional.empty()));
} }
}; };
update.set(getter, value); update.set(getter, value);
@ -544,16 +648,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C
// Add the partition and clustering keys if they were in the draft (normally the // Add the partition and clustering keys if they were in the draft (normally the
// case). // case).
entity.getOrderedProperties().forEach(property -> { entity
.getOrderedProperties()
.forEach(
property -> {
switch (property.getColumnType()) { switch (property.getColumnType()) {
case PARTITION_KEY: case PARTITION_KEY:
case CLUSTERING_COLUMN: case CLUSTERING_COLUMN:
String propertyName = property.getPropertyName(); String propertyName = property.getPropertyName();
Object value = map.get(propertyName); Object value = map.get(propertyName);
Getter<Object> getter = new Getter<Object>() { Getter<Object> getter =
new Getter<Object>() {
@Override @Override
public Object get() { public Object get() {
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty())); throw new DslPropertyException(
new HelenusPropertyNode(property, Optional.empty()));
} }
}; };
update.where(getter, eq(value)); update.where(getter, eq(value));
@ -581,7 +690,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
} }
public <T> InsertOperation<T> insert(T pojo) { public <T> InsertOperation<T> insert(T pojo) {
Objects.requireNonNull(pojo, Objects.requireNonNull(
pojo,
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null"); "supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null; HelenusEntity entity = null;
try { try {
@ -621,7 +731,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
} }
public <T> InsertOperation<T> upsert(T pojo) { public <T> InsertOperation<T> upsert(T pojo) {
Objects.requireNonNull(pojo, Objects.requireNonNull(
pojo,
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null"); "supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
HelenusEntity entity = null; HelenusEntity entity = null;
try { try {
@ -698,6 +809,9 @@ public final class HelenusSession extends AbstractSessionOperations implements C
case UDT: case UDT:
execute(SchemaUtil.dropUserType(entity), true); execute(SchemaUtil.dropUserType(entity), true);
break; break;
default:
throw new HelenusException("Unknown entity type.");
} }
} }
} }
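
Editor's note (illustrative, not part of this commit): the getter-based select
overloads above pair each resolved HelenusPropertyNode with the matching
Mappers.MapperN. A minimal usage sketch, assuming a HelenusSession `session`
and a registered entity dsl `timeline` (both hypothetical here):

    SelectOperation<Fun.Tuple2<UUID, Date>> op =
        session.select(timeline::userId, timeline::timestamp);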

View file

@@ -16,9 +16,7 @@
package net.helenus.core;

import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
@@ -37,7 +35,8 @@ public enum HelenusValidator implements PropertyValueValidator {
    try {
      valid = typeless.isValid(value, null);
    } catch (ClassCastException e) {
      throw new HelenusMappingException(
          "validator was used for wrong type '" + value + "' in " + prop, e);
    }

    if (!valid) {

View file

@@ -15,10 +15,8 @@
 */
package net.helenus.core;

import com.datastax.driver.core.Row;
import java.util.function.Function;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
@@ -26,8 +24,7 @@ import net.helenus.support.Fun;
public final class Mappers {

  private Mappers() {}

  public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {
@@ -59,7 +56,8 @@ public final class Mappers {
    @Override
    public Fun.Tuple2<A, B> apply(Row row) {
      return new Fun.Tuple2<A, B>(
          provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
    }
  }
@@ -70,7 +68,10 @@ public final class Mappers {
    private final HelenusProperty p2;
    private final HelenusProperty p3;

    public Mapper3(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3) {
      this.provider = provider;
      this.p1 = p1.getProperty();
@@ -80,7 +81,9 @@ public final class Mappers {
    @Override
    public Fun.Tuple3<A, B, C> apply(Row row) {
      return new Fun.Tuple3<A, B, C>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3));
    }
  }
@@ -93,8 +96,12 @@ public final class Mappers {
    private final HelenusProperty p3;
    private final HelenusProperty p4;

    public Mapper4(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -104,18 +111,27 @@ public final class Mappers {
    @Override
    public Fun.Tuple4<A, B, C, D> apply(Row row) {
      return new Fun.Tuple4<A, B, C, D>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4));
    }
  }

  public static final class Mapper5<A, B, C, D, E>
      implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5;

    public Mapper5(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -126,19 +142,29 @@ public final class Mappers {
    @Override
    public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
      return new Fun.Tuple5<A, B, C, D, E>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5));
    }
  }

  public static final class Mapper6<A, B, C, D, E, F>
      implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6;

    public Mapper6(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5,
        HelenusPropertyNode p6) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -150,20 +176,30 @@ public final class Mappers {
    @Override
    public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
      return new Fun.Tuple6<A, B, C, D, E, F>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6));
    }
  }

  public static final class Mapper7<A, B, C, D, E, F, G>
      implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

    public Mapper7(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5,
        HelenusPropertyNode p6,
        HelenusPropertyNode p7) {
      this.provider = provider;
      this.p1 = p1.getProperty();
@@ -177,10 +213,14 @@ public final class Mappers {
    @Override
    public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
      return new Fun.Tuple7<A, B, C, D, E, F, G>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6),
          provider.getColumnValue(row, 6, p7));
    }
  }
}
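
Editor's note (illustrative, not part of this commit): each MapperN above is
just a Function<Row, Fun.TupleN> reading positional columns through the
ColumnValueProvider; written directly against the driver, the same shape is:

    Function<Row, Fun.Tuple2<String, Integer>> mapper =
        row -> new Fun.Tuple2<String, Integer>(row.getString(0), row.getInt(1));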

View file

@@ -1,10 +1,8 @@
package net.helenus.core;

import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.Objects;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;

View file

@@ -17,7 +17,6 @@ package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;
@@ -40,8 +39,8 @@ public final class Postulate<V> {
    switch (operator) {
      case EQ:
        return QueryBuilder.eq(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case IN:
        Object[] preparedValues = new Object[values.length];
@@ -51,20 +50,20 @@
        return QueryBuilder.in(node.getColumnName(), preparedValues);

      case LT:
        return QueryBuilder.lt(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case LTE:
        return QueryBuilder.lte(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GT:
        return QueryBuilder.gt(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GTE:
        return QueryBuilder.gte(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      default:
        throw new HelenusMappingException("unknown filter operation " + operator);

View file

@@ -15,20 +15,17 @@
 */
package net.helenus.core;

import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import net.helenus.mapping.OrderingDirection;

/** Sugar methods for the queries */
public final class Query {

  private Query() {}

  public static BindMarker marker() {
    return QueryBuilder.bindMarker();

View file

@@ -15,16 +15,14 @@
 */
package net.helenus.core;

import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;
import java.util.*;
import java.util.stream.Collectors;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
@@ -35,8 +33,7 @@ import net.helenus.support.HelenusMappingException;
public final class SchemaUtil {

  private SchemaUtil() {}

  public static RegularStatement use(String keyspace, boolean forceQuote) {
    if (forceQuote) {
@@ -59,23 +56,31 @@
      ColumnType columnType = prop.getColumnType();

      if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
        throw new HelenusMappingException(
            "primary key columns are not supported in UserDefinedType for "
                + prop.getPropertyName()
                + " in entity "
                + entity);
      }

      try {
        prop.getDataType().addColumn(create, prop.getColumnName());
      } catch (IllegalArgumentException e) {
        throw new HelenusMappingException(
            "invalid column name '"
                + prop.getColumnName()
                + "' in entity '"
                + entity.getName().getName()
                + "'",
            e);
      }
    }

    return create;
  }

  public static List<SchemaStatement> alterUserType(
      UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {

    if (entity.getType() != HelenusEntityType.UDT) {
      throw new HelenusMappingException("expected UDT entity " + entity);
@@ -84,12 +89,13 @@
    List<SchemaStatement> result = new ArrayList<SchemaStatement>();

    /**
     * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
     * exist
     */
    Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

    final Set<String> visitedColumns =
        dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

    for (HelenusProperty prop : entity.getOrderedProperties()) {
@@ -106,8 +112,9 @@
      }

      DataType dataType = userType.getFieldType(columnName);
      SchemaStatement stmt =
          prop.getDataType()
              .alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));

      if (stmt != null) {
        result.add(stmt);
@@ -158,21 +165,25 @@
      }
    }

    return "("
        + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
        + ((c.size() > 0)
            ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
            : "")
        + ")";
  }

  public static SchemaStatement createMaterializedView(
      String keyspace, String viewName, HelenusEntity entity) {
    if (entity.getType() != HelenusEntityType.VIEW) {
      throw new HelenusMappingException("expected view entity " + entity);
    }

    List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> props.add(p));

    Select.Selection selection = QueryBuilder.select();
@@ -196,8 +207,8 @@
        case CLUSTERING_COLUMN:
          where = where.and(new IsNotNullClause(columnName));
          ClusteringColumn clusteringColumn =
              prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
          if (clusteringColumn != null && clusteringColumn.ordering() != null) {
            o.add(columnName + " " + clusteringColumn.ordering().cql());
          }
@@ -213,10 +224,12 @@
    if (o.size() > 0) {
      clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
    }

    return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering)
        .ifNotExists();
  }

  public static SchemaStatement dropMaterializedView(
      String keyspace, String viewName, HelenusEntity entity) {
    return new DropMaterializedView(keyspace, viewName);
  }
@@ -249,14 +262,15 @@
    if (!clusteringColumns.isEmpty()) {
      Options options = create.withOptions();
      clusteringColumns.forEach(
          p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
    }

    return create;
  }

  public static List<SchemaStatement> alterTable(
      TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {

    if (entity.getType() != HelenusEntityType.TABLE) {
      throw new HelenusMappingException("expected table entity " + entity);
@@ -266,7 +280,8 @@
    Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

    final Set<String> visitedColumns =
        dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

    for (HelenusProperty prop : entity.getOrderedProperties()) {
@@ -283,8 +298,8 @@
      }

      ColumnMetadata columnMetadata = tmd.getColumn(columnName);
      SchemaStatement stmt =
          prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));

      if (stmt != null) {
        result.add(stmt);
@@ -314,28 +329,42 @@
  public static SchemaStatement createIndex(HelenusProperty prop) {
    if (prop.caseSensitiveIndex()) {
      return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
          .ifNotExists()
          .onTable(prop.getEntity().getName().toCql())
          .andColumn(prop.getColumnName().toCql());
    } else {
      return new CreateSasiIndex(prop.getIndexName().get().toCql())
          .ifNotExists()
          .onTable(prop.getEntity().getName().toCql())
          .andColumn(prop.getColumnName().toCql());
    }
  }

  public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
    return entity
        .getOrderedProperties()
        .stream()
        .filter(p -> p.getIndexName().isPresent())
        .map(p -> SchemaUtil.createIndex(p))
        .collect(Collectors.toList());
  }

  public static List<SchemaStatement> alterIndexes(
      TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {

    List<SchemaStatement> list = new ArrayList<SchemaStatement>();

    final Set<String> visitedColumns =
        dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();

    entity
        .getOrderedProperties()
        .stream()
        .filter(p -> p.getIndexName().isPresent())
        .forEach(
            p -> {
              String columnName = p.getColumnName().getName();

              if (dropUnusedIndexes) {
@@ -356,9 +385,11 @@
    if (dropUnusedIndexes) {

      tmd.getColumns()
          .stream()
          .filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
          .forEach(
              c -> {
                list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
              });
    }
@@ -384,7 +415,10 @@
      throw new HelenusMappingException(
          "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
              + prop.getPropertyName()
              + "' type is '"
              + prop.getJavaType()
              + "' in the entity "
              + prop.getEntity());
    }
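
Editor's note (illustrative, not part of this commit): the SchemaStatement
lists built here are meant to be executed one statement at a time, e.g.:

    for (SchemaStatement stmt : SchemaUtil.createIndexes(entity)) {
      session.execute(stmt); // `session` is a driver Session, `entity` a HelenusEntity
    }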

View file

@@ -15,18 +15,16 @@
 */
package net.helenus.core;

import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.io.PrintStream;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import net.helenus.core.cache.SessionCache;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
@@ -186,8 +184,11 @@ public final class SessionInitializer extends AbstractSessionOperations {
  public SessionInitializer addPackage(String packageName) {
    try {
      PackageUtil.getClasses(packageName)
          .stream()
          .filter(c -> c.isInterface() && !c.isAnnotation())
          .forEach(
              clazz -> {
                initList.add(Either.right(clazz));
              });
    } catch (IOException | ClassNotFoundException e) {
@@ -249,16 +250,29 @@
  public synchronized HelenusSession get() {
    initialize();
    return new HelenusSession(
        session,
        usingKeyspace,
        registry,
        showCql,
        printStream,
        sessionRepository,
        executor,
        autoDdl == AutoDdl.CREATE_DROP,
        consistencyLevel,
        idempotent,
        unitOfWorkClass,
        sessionCache,
        metricRegistry,
        zipkinTracer);
  }

  private void initialize() {

    Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");

    initList.forEach(
        (either) -> {
          Class<?> iface = null;
          if (either.isLeft()) {
            iface = MappingUtil.getMappingInterface(either.getLeft());
@@ -280,13 +294,19 @@
        // Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
        // referenced
        // by a view.
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.dropView(e));

        // Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
        // the type is
        // still referenced by a table.
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.dropTable(e));

        eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
@@ -296,10 +316,16 @@
      case CREATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.createTable(e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.createView(e));

        break;
@@ -307,7 +333,10 @@
      case VALIDATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.validateTable(getTableMetadata(e), e));

        break;
@@ -315,13 +344,22 @@
      case UPDATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.dropView(e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.updateTable(getTableMetadata(e), e));

        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.createView(e));
        break;
      }
@@ -333,27 +371,41 @@
    }
  }

  private void eachUserTypeInOrder(
      UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {

    Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
    Set<HelenusEntity> stack = new HashSet<HelenusEntity>();

    sessionRepository
        .entities()
        .stream()
        .filter(e -> e.getType() == HelenusEntityType.UDT)
        .forEach(
            e -> {
              stack.clear();
              eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
            });
  }

  private void eachUserTypeInReverseOrder(
      UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
    ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
    eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
    deque
        .stream()
        .forEach(
            e -> {
              action.accept(e);
            });
  }

  private void eachUserTypeInRecursion(
      HelenusEntity e,
      Set<HelenusEntity> processedSet,
      Set<HelenusEntity> stack,
      UserTypeOperations userTypeOps,
      Consumer<? super HelenusEntity> action) {

    stack.add(e);
@@ -374,7 +426,8 @@
  private KeyspaceMetadata getKeyspaceMetadata() {
    if (keyspaceMetadata == null) {
      keyspaceMetadata =
          session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
    }
    return keyspaceMetadata;
  }
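
Editor's note (a hedged sketch, not part of this commit): a typical bootstrap
through this initializer. `Helenus.init(...)`, `use(...)` and `autoCreateDrop()`
are assumed from the surrounding project API (the requireNonNull message above
implies a `use` operator); the keyspace and package names are hypothetical:

    HelenusSession session =
        Helenus.init(cluster.connect())
            .use("my_keyspace")
            .addPackage("com.example.model")
            .autoCreateDrop()
            .get();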

View file

@@ -15,11 +15,9 @@
 */
package net.helenus.core;

import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import net.helenus.mapping.HelenusEntity;

public final class SessionRepository {
@@ -32,7 +30,8 @@
    userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();

    entityMap =
        ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
  }

  public UserType findUserType(String name) {

View file

@@ -15,17 +15,15 @@
 */
package net.helenus.core;

import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
@@ -35,7 +33,8 @@ import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder {

  private static final Optional<HelenusEntityType> OPTIONAL_UDT =
      Optional.of(HelenusEntityType.UDT);

  private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
@@ -99,7 +98,8 @@
      entity = helenusEntity;

      if (type.isPresent() && entity.getType() != type.get()) {
        throw new HelenusMappingException(
            "unexpected entity type " + entity.getType() + " for " + entity);
      }

      HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);

View file

@@ -15,11 +15,9 @@
 */
package net.helenus.core;

import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
@@ -29,7 +27,8 @@ public final class TableOperations {
  private final boolean dropUnusedColumns;
  private final boolean dropUnusedIndexes;

  public TableOperations(
      AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
    this.sessionOps = sessionOps;
    this.dropUnusedColumns = dropUnusedColumns;
    this.dropUnusedIndexes = dropUnusedIndexes;
@@ -48,7 +47,10 @@
    if (tmd == null) {
      throw new HelenusException(
          "table does not exist "
              + entity.getName()
              + " for entity "
              + entity.getMappingInterface());
    }

    List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
@@ -57,7 +59,10 @@
    if (!list.isEmpty()) {
      throw new HelenusException(
          "schema changed for entity "
              + entity.getMappingInterface()
              + ", apply this command: "
              + list);
    }
  }
@@ -73,14 +78,18 @@
  public void createView(HelenusEntity entity) {
    sessionOps.execute(
        SchemaUtil.createMaterializedView(
            sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
        true);
    // executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
    // does not yet support 2i on materialized views.
  }

  public void dropView(HelenusEntity entity) {
    sessionOps.execute(
        SchemaUtil.dropMaterializedView(
            sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
        true);
  }

  public void updateView(TableMetadata tmd, HelenusEntity entity) {
@@ -95,7 +104,8 @@
  private void executeBatch(List<SchemaStatement> list) {

    list.forEach(
        s -> {
          sessionOps.execute(s, true);
        });
  }

View file

@@ -15,11 +15,9 @@
 */
package net.helenus.core;

import com.google.common.base.Stopwatch;
import java.util.List;
import java.util.Optional;
import net.helenus.core.cache.Facet;

public interface UnitOfWork<X extends Exception> extends AutoCloseable {
@@ -35,19 +33,16 @@ public interface UnitOfWork<X extends Exception> extends AutoCloseable {
  void addNestedUnitOfWork(UnitOfWork<X> uow);

  /**
   * Checks to see if the work performed between calling begin and now can be committed or not.
   *
   * @return a function from which to chain work that only happens when commit is successful
   * @throws X when the work overlaps with other concurrent writers.
   */
  PostCommitFunction<Void, Void> commit() throws X;

  /**
   * Explicitly abort the work within this unit of work. Any nested aborted unit of work will
   * trigger the entire unit of work to commit.
   */
  void abort();
@@ -62,12 +57,15 @@
  List<Facet> cacheEvict(List<Facet> facets);

  String getPurpose();

  UnitOfWork setPurpose(String purpose);

  void setInfo(String info);

  void addDatabaseTime(String name, Stopwatch amount);

  void addCacheLookupTime(Stopwatch amount);

  // Cache > 0 means "cache hit", < 0 means cache miss.
  void recordCacheAndDatabaseOperationCount(int cache, int database);
}
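
Editor's note (a hedged sketch, not part of this commit): the contract above
suggests usage roughly like the following; `session.begin()` and the
PostCommitFunction chaining method name are assumptions, not confirmed API:

    UnitOfWork<TimeoutException> uow = session.begin();
    try {
      // ... reads and writes tracked by the unit of work ...
      uow.commit().andThen(() -> log.info("committed"));
    } catch (TimeoutException e) {
      uow.abort(); // work overlapped a concurrent writer
    }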

View file

@@ -15,11 +15,9 @@
 */
package net.helenus.core;

import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
@@ -54,7 +52,10 @@ public final class UserTypeOperations {
    if (!list.isEmpty()) {
      throw new HelenusException(
          "schema changed for entity "
              + entity.getMappingInterface()
              + ", apply this command: "
              + list);
    }
  }
@@ -70,7 +71,8 @@
  private void executeBatch(List<SchemaStatement> list) {

    list.forEach(
        s -> {
          sessionOps.execute(s, true);
        });
  }

@ -22,5 +22,4 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE) @Target(ElementType.TYPE)
public @interface Cacheable { public @interface Cacheable {}
}
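
For illustration, the marker is applied at the entity-interface level (Widget is a hypothetical entity type):

    // @Cacheable carries no members; it only flags the type as cache-eligible.
    @Cacheable
    public interface Widget {
      String name();
    }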

@ -21,14 +21,15 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.ConflictingUnitOfWorkException; import net.helenus.core.ConflictingUnitOfWorkException;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD) @Target(ElementType.METHOD)
public @interface Retry { public @interface Retry {
Class<? extends Exception>[] on() default {ConflictingUnitOfWorkException.class, TimeoutException.class}; Class<? extends Exception>[] on() default {
ConflictingUnitOfWorkException.class, TimeoutException.class
};
int times() default 3; int times() default 3;
} }
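
By default a decorated method is retried three times on conflict or timeout; a hedged example overriding both (the method and repository are illustrative):

    // Retry up to 5 times, and only on TimeoutException.
    @Retry(on = {TimeoutException.class}, times = 5)
    public Widget fetchWidget() throws TimeoutException {
      return widgetRepository.fetch();  // hypothetical call guarded by RetryAspect
    }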

@ -18,7 +18,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Arrays; import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Around;
@ -29,8 +29,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert; import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect @Aspect
public class RetryAspect { public class RetryAspect {
@ -55,8 +53,8 @@ public class RetryAspect {
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) private Object tryProceeding(
throws Throwable { ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try { try {
return proceed(pjp); return proceed(pjp);
} catch (Throwable throwable) { } catch (Throwable throwable) {

@ -2,7 +2,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Arrays; import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Around;
@ -13,8 +13,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert; import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect @Aspect
public class RetryConcurrentUnitOfWorkAspect { public class RetryConcurrentUnitOfWorkAspect {
@ -39,8 +37,8 @@ public class RetryConcurrentUnitOfWorkAspect {
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) private Object tryProceeding(
throws Throwable { ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try { try {
return proceed(pjp); return proceed(pjp);
} catch (Throwable throwable) { } catch (Throwable throwable) {

@ -18,7 +18,6 @@ package net.helenus.core.cache;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public class BoundFacet extends Facet<String> { public class BoundFacet extends Facet<String> {
@ -31,15 +30,25 @@ public class BoundFacet extends Facet<String> {
} }
public BoundFacet(String name, Map<HelenusProperty, Object> properties) { public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name, super(
name,
(properties.keySet().size() > 1) (properties.keySet().size() > 1)
? "[" + String.join(", ", ? "["
properties.keySet().stream().map(key -> properties.get(key).toString()) + String.join(
", ",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet())) .collect(Collectors.toSet()))
+ "]" + "]"
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString()) : String.join(
"",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))); .collect(Collectors.toSet())));
this.properties = properties; this.properties = properties;
} }
} }

@ -8,8 +8,7 @@ public class CacheUtil {
public static List<String[]> combinations(List<String> items) { public static List<String[]> combinations(List<String> items) {
int n = items.size(); int n = items.size();
if (n > 20 || n < 0) if (n > 20) throw new IllegalArgumentException(n + " is out of range");
throw new IllegalArgumentException(n + " is out of range");
long e = Math.round(Math.pow(2, n)); long e = Math.round(Math.pow(2, n));
List<String[]> out = new ArrayList<String[]>((int) e - 1); List<String[]> out = new ArrayList<String[]>((int) e - 1);
for (int k = 1; k <= items.size(); k++) { for (int k = 1; k <= items.size(); k++) {
@ -18,7 +17,8 @@ public class CacheUtil {
return out; return out;
} }
private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) { private static void kCombinations(
List<String> items, int n, int k, String[] arr, List<String[]> out) {
if (k == 0) { if (k == 0) {
out.add(arr.clone()); out.add(arr.clone());
} else { } else {
@ -30,10 +30,17 @@ public class CacheUtil {
} }
public static List<String[]> flattenFacets(List<Facet> facets) { public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = CacheUtil.combinations( List<String[]> combinations =
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> { CacheUtil.combinations(
facets
.stream()
.filter(facet -> !facet.fixed())
.filter(facet -> facet.value() != null)
.map(
facet -> {
return facet.name() + "==" + facet.value(); return facet.name() + "==" + facet.value();
}).collect(Collectors.toList())); })
.collect(Collectors.toList()));
return combinations; return combinations;
} }
@ -57,8 +64,10 @@ public class CacheUtil {
} }
public static String schemaName(List<Facet> facets) { public static String schemaName(List<Facet> facets) {
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString()) return facets
.stream()
.filter(Facet::fixed)
.map(facet -> facet.value().toString())
.collect(Collectors.joining(".")); .collect(Collectors.joining("."));
} }
} }
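
To make the combinatorics concrete: combinations(...) emits every non-empty subset of the input, 2^n - 1 arrays in all, which is why n is capped at 20 (2^20 - 1 = 1,048,575 entries). A small sketch:

    List<String[]> out = CacheUtil.combinations(Arrays.asList("a", "b", "c"));
    // out holds 2^3 - 1 = 7 arrays; sizes ascend:
    // [a], [b], [c], then the three pairs, then [a, b, c]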

@ -16,9 +16,7 @@
package net.helenus.core.cache; package net.helenus.core.cache;
/** /** An Entity is identifiable via one or more Facets */
* An Entity is identifiable via one or more Facets
*/
public class Facet<T> { public class Facet<T> {
private final String name; private final String name;
private T value; private T value;
@ -49,5 +47,4 @@ public class Facet<T> {
public boolean fixed() { public boolean fixed() {
return fixed; return fixed;
} }
} }

@ -40,5 +40,4 @@ public class GuavaCache<K, V> implements SessionCache<K, V> {
public void put(K key, V value) { public void put(K key, V value) {
cache.put(key, value); cache.put(key, value);
} }
} }

@ -16,21 +16,45 @@
package net.helenus.core.cache; package net.helenus.core.cache;
import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public interface SessionCache<K, V> { public interface SessionCache<K, V> {
static final Logger LOG = LoggerFactory.getLogger(SessionCache.class);
static <K, V> SessionCache<K, V> defaultCache() { static <K, V> SessionCache<K, V> defaultCache() {
int MAX_CACHE_SIZE = 10000; GuavaCache<K, V> cache;
int MAX_CACHE_EXPIRE_SECONDS = 600; RemovalListener<K, V> listener =
return new GuavaCache<K, V>(CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE) new RemovalListener<K, V>() {
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS) @Override
.expireAfterWrite(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build()); public void onRemoval(RemovalNotification<K, V> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
LOG.info(cause);
}
}
};
cache =
new GuavaCache<K, V>(
CacheBuilder.newBuilder()
.maximumSize(25_000)
.expireAfterAccess(5, TimeUnit.MINUTES)
.softValues()
.removalListener(listener)
.build());
return cache;
} }
void invalidate(K key); void invalidate(K key);
V get(K key); V get(K key);
void put(K key, V value); void put(K key, V value);
} }
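
A usage sketch against the interface as it now stands (key and value types are illustrative). Entries disappear after five minutes without access, when the 25,000-entry ceiling forces eviction, or under memory pressure via softValues():

    SessionCache<String, Object> cache = SessionCache.defaultCache();
    cache.put("widget:1", widget);        // widget is a stand-in value
    Object hit = cache.get("widget:1");   // null once expired or evicted
    cache.invalidate("widget:1");         // explicit removal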

@ -19,7 +19,6 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.SchemaUtil; import net.helenus.core.SchemaUtil;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;

@ -16,15 +16,13 @@
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*; import java.util.*;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>> public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends extends AbstractOperation<E, O> {
AbstractOperation<E, O> {
protected List<Filter<?>> filters = null; protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
@ -133,7 +131,6 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
} }
} }
} }
} }
if (binder.isBound()) { if (binder.isBound()) {
boundFacets.add(binder.bind()); boundFacets.add(binder.bind());

@ -19,13 +19,12 @@ import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>> public abstract class AbstractFilterOptionalOperation<
extends E, O extends AbstractFilterOptionalOperation<E, O>>
AbstractOptionalOperation<E, O> { extends AbstractOptionalOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null; protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;

@ -19,13 +19,12 @@ import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>> public abstract class AbstractFilterStreamOperation<
extends E, O extends AbstractFilterStreamOperation<E, O>>
AbstractStreamOperation<E, O> { extends AbstractStreamOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null; protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;

@ -15,17 +15,16 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> { public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
extends AbstractStatementOperation<E, O> {
public AbstractOperation(AbstractSessionOperations sessionOperations) { public AbstractOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
@ -40,8 +39,15 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
public E sync() throws TimeoutException { public E sync() throws TimeoutException {
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, false); this.execute(
sessionOps,
null,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
false);
return transform(resultSet); return transform(resultSet);
} finally { } finally {
context.stop(); context.stop();
@ -49,13 +55,19 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
} }
public E sync(UnitOfWork uow) throws TimeoutException { public E sync(UnitOfWork uow) throws TimeoutException {
if (uow == null) if (uow == null) return sync();
return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, true); execute(
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
E result = transform(resultSet); E result = transform(resultSet);
return result; return result;
} finally { } finally {
@ -64,7 +76,8 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
} }
public CompletableFuture<E> async() { public CompletableFuture<E> async() {
return CompletableFuture.<E>supplyAsync(() -> { return CompletableFuture.<E>supplyAsync(
() -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {
@ -74,9 +87,9 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
} }
public CompletableFuture<E> async(UnitOfWork uow) { public CompletableFuture<E> async(UnitOfWork uow) {
if (uow == null) if (uow == null) return async();
return async(); return CompletableFuture.<E>supplyAsync(
return CompletableFuture.<E>supplyAsync(() -> { () -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {

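The two entry points above, sketched from a caller's point of view (Widget is a stand-in type):

    // Sketch: op is any AbstractOperation<Widget, ?>.
    static Widget fetch(AbstractOperation<Widget, ?> op) throws TimeoutException {
      return op.sync();                   // blocks up to the query timeout
    }
    static CompletableFuture<Widget> fetchAsync(AbstractOperation<Widget, ?> op) {
      return op.async();                  // TimeoutException surfaces as CompletionException
    }
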
@ -17,12 +17,6 @@ package net.helenus.core.operation;
import static net.helenus.core.HelenusSession.deleted; import static net.helenus.core.HelenusSession.deleted;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
@ -30,15 +24,18 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>> public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends extends AbstractStatementOperation<E, O> {
AbstractStatementOperation<E, O> {
public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) { public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
@ -52,7 +49,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() { public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
final O _this = (O) this; final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), return Futures.transform(
prepareStatementAsync(),
new Function<PreparedStatement, PreparedOptionalOperation<E>>() { new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
@Override @Override
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) { public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
@ -85,8 +83,15 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
if (!result.isPresent()) { if (!result.isPresent()) {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, ResultSet resultSet =
queryTimeoutUnits, showValues, false); this.execute(
sessionOps,
null,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
false);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
@ -105,8 +110,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} }
public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException { public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException {
if (uow == null) if (uow == null) return sync();
return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
@ -166,8 +170,15 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} else { } else {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, true); execute(
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
@ -186,7 +197,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} }
public CompletableFuture<Optional<E>> async() { public CompletableFuture<Optional<E>> async() {
return CompletableFuture.<Optional<E>>supplyAsync(() -> { return CompletableFuture.<Optional<E>>supplyAsync(
() -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {
@ -196,9 +208,9 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} }
public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) { public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
if (uow == null) if (uow == null) return async();
return async(); return CompletableFuture.<Optional<E>>supplyAsync(
return CompletableFuture.<Optional<E>>supplyAsync(() -> { () -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {

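Net effect for callers, sketched (op and uow are stand-ins built via the session DSL): the cache is consulted first and Cassandra only on a miss, inside or outside a unit of work.

    Optional<Widget> maybe = op.sync();      // op: an AbstractOptionalOperation<Widget, ?>
    maybe.ifPresent(w -> LOG.info("hit: {}", w));
    Optional<Widget> inUow = op.sync(uow);   // same lookup, merged through the UnitOfWork
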
@ -15,12 +15,8 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.ArrayList; import brave.Tracer;
import java.util.List; import brave.propagation.TraceContext;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement; import com.datastax.driver.core.RegularStatement;
@ -31,9 +27,11 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy; import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.ArrayList;
import brave.Tracer; import java.util.List;
import brave.propagation.TraceContext; import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
@ -43,7 +41,8 @@ import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.BeanColumnValueProvider; import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> { public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
extends Operation<E> {
protected boolean checkCache = true; protected boolean checkCache = true;
protected boolean showValues = true; protected boolean showValues = true;
@ -277,8 +276,7 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
public String cql() { public String cql() {
Statement statement = buildStatement(false); Statement statement = buildStatement(false);
if (statement == null) if (statement == null) return "";
return "";
if (statement instanceof BuiltStatement) { if (statement instanceof BuiltStatement) {
BuiltStatement buildStatement = (BuiltStatement) statement; BuiltStatement buildStatement = (BuiltStatement) statement;
return buildStatement.setForceNoValues(true).getQueryString(); return buildStatement.setForceNoValues(true).getQueryString();
@ -331,7 +329,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) { protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
List<Facet> facets = new ArrayList<>(); List<Facet> facets = new ArrayList<>();
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null; Map<String, Object> valueMap =
pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
for (Facet facet : identifyingFacets) { for (Facet facet : identifyingFacets) {
if (facet instanceof UnboundFacet) { if (facet instanceof UnboundFacet) {

@ -17,13 +17,6 @@ package net.helenus.core.operation;
import static net.helenus.core.HelenusSession.deleted; import static net.helenus.core.HelenusSession.deleted;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
@ -31,15 +24,19 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>> public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
extends extends AbstractStatementOperation<E, O> {
AbstractStatementOperation<E, O> {
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) { public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
@ -53,7 +50,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() { public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
final O _this = (O) this; final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), return Futures.transform(
prepareStatementAsync(),
new Function<PreparedStatement, PreparedStreamOperation<E>>() { new Function<PreparedStatement, PreparedStreamOperation<E>>() {
@Override @Override
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) { public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
@ -86,8 +84,15 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
if (resultStream == null) { if (resultStream == null) {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, ResultSet resultSet =
queryTimeoutUnits, showValues, false); this.execute(
sessionOps,
null,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
false);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
resultStream = transform(resultSet); resultStream = transform(resultSet);
@ -97,7 +102,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();
if (facets != null && facets.size() > 1) { if (facets != null && facets.size() > 1) {
List<E> again = new ArrayList<>(); List<E> again = new ArrayList<>();
resultStream.forEach(result -> { resultStream.forEach(
result -> {
sessionOps.updateCache(result, facets); sessionOps.updateCache(result, facets);
again.add(result); again.add(result);
}); });
@ -112,8 +118,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
} }
public Stream<E> sync(UnitOfWork uow) throws TimeoutException { public Stream<E> sync(UnitOfWork uow) throws TimeoutException {
if (uow == null) if (uow == null) return sync();
return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
@ -164,8 +169,15 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
// Check to see if we fetched the object from the cache // Check to see if we fetched the object from the cache
if (resultStream == null) { if (resultStream == null) {
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, true); execute(
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
resultStream = transform(resultSet); resultStream = transform(resultSet);
} }
@ -174,7 +186,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
if (resultStream != null) { if (resultStream != null) {
List<E> again = new ArrayList<>(); List<E> again = new ArrayList<>();
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();
resultStream.forEach(result -> { resultStream.forEach(
result -> {
if (result != deleted) { if (result != deleted) {
if (updateCache) { if (updateCache) {
cacheUpdate(uow, result, facets); cacheUpdate(uow, result, facets);
@ -192,7 +205,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
} }
public CompletableFuture<Stream<E>> async() { public CompletableFuture<Stream<E>> async() {
return CompletableFuture.<Stream<E>>supplyAsync(() -> { return CompletableFuture.<Stream<E>>supplyAsync(
() -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {
@ -202,9 +216,9 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
} }
public CompletableFuture<Stream<E>> async(UnitOfWork uow) { public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
if (uow == null) if (uow == null) return async();
return async(); return CompletableFuture.<Stream<E>>supplyAsync(
return CompletableFuture.<Stream<E>>supplyAsync(() -> { () -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {

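Why results are drained into `again` and re-streamed above: a java.util.stream.Stream is single-use, so one traversal cannot both update the cache and be returned to the caller. A two-line reminder:

    Stream<String> s = Stream.of("a", "b");
    s.forEach(x -> { /* first traversal consumes the stream */ });
    // s.count();  // would throw IllegalStateException: stream already operated upon
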
@ -15,18 +15,19 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.Optional;
import com.datastax.driver.core.BoundStatement; import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement; import com.datastax.driver.core.Statement;
import java.util.Optional;
public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> { public final class BoundOptionalOperation<E>
extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
private final BoundStatement boundStatement; private final BoundStatement boundStatement;
private final AbstractOptionalOperation<E, ?> delegate; private final AbstractOptionalOperation<E, ?> delegate;
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) { public BoundOptionalOperation(
BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
super(operation.sessionOps); super(operation.sessionOps);
this.boundStatement = boundStatement; this.boundStatement = boundStatement;
this.delegate = operation; this.delegate = operation;

@ -15,21 +15,21 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.List;
import java.util.stream.Stream;
import com.datastax.driver.core.BoundStatement; import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement; import com.datastax.driver.core.Statement;
import java.util.List;
import java.util.stream.Stream;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> { public final class BoundStreamOperation<E>
extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
private final BoundStatement boundStatement; private final BoundStatement boundStatement;
private final AbstractStreamOperation<E, ?> delegate; private final AbstractStreamOperation<E, ?> delegate;
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) { public BoundStreamOperation(
BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
super(operation.sessionOps); super(operation.sessionOps);
this.boundStatement = boundStatement; this.boundStatement = boundStatement;
this.delegate = operation; this.delegate = operation;

@ -20,7 +20,6 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select; import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where; import com.datastax.driver.core.querybuilder.Select.Where;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter; import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
@ -74,8 +73,11 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can count columns only in single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface()); "you can count columns only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
} }

@ -15,15 +15,13 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.List;
import java.util.concurrent.TimeoutException;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete; import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where; import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter; import net.helenus.core.Filter;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
@ -123,8 +121,11 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can delete rows only in single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface()); "you can delete rows only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
@ -155,5 +156,4 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
public List<Facet> getFacets() { public List<Facet> getFacets() {
return entity.getFacets(); return entity.getFacets();
} }
} }

@ -15,20 +15,19 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert; import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.DefaultPrimitiveTypes; import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted; import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
@ -42,7 +41,8 @@ import net.helenus.support.HelenusMappingException;
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> { public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>(); private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final T pojo; private final T pojo;
private final Class<?> resultType; private final Class<?> resultType;
private HelenusEntity entity; private HelenusEntity entity;
@ -59,7 +59,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.resultType = ResultSet.class; this.resultType = ResultSet.class;
} }
public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) { public InsertOperation(
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.ifNotExists = ifNotExists; this.ifNotExists = ifNotExists;
@ -67,8 +68,12 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.resultType = resultType; this.resultType = resultType;
} }
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo, public InsertOperation(
Set<String> mutations, boolean ifNotExists) { AbstractSessionOperations sessionOperations,
HelenusEntity entity,
T pojo,
Set<String> mutations,
boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.entity = entity; this.entity = entity;
@ -134,8 +139,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
values.forEach(t -> addPropertyNode(t._1)); values.forEach(t -> addPropertyNode(t._1));
if (values.isEmpty()) if (values.isEmpty()) return null;
return null;
if (entity == null) { if (entity == null) {
throw new HelenusMappingException("unknown entity"); throw new HelenusMappingException("unknown entity");
@ -147,7 +151,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
insert.ifNotExists(); insert.ifNotExists();
} }
values.forEach(t -> { values.forEach(
t -> {
insert.value(t._1.getColumnName(), t._2); insert.value(t._1.getColumnName(), t._2);
}); });
@ -163,6 +168,10 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
@Override @Override
public T transform(ResultSet resultSet) { public T transform(ResultSet resultSet) {
if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
throw new HelenusException("Statement was not applied due to consistency constraints");
}
Class<?> iface = entity.getMappingInterface(); Class<?> iface = entity.getMappingInterface();
if (resultType == iface) { if (resultType == iface) {
if (values.size() > 0) { if (values.size() > 0) {
@ -179,8 +188,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (backingMap.containsKey(key)) { if (backingMap.containsKey(key)) {
// Some values may need to be converted (e.g. from String to Enum). This is done // Some values may need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below. // within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter = prop Optional<Function<Object, Object>> converter =
.getReadConverter(sessionOps.getSessionRepository()); prop.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key))); backingMap.put(key, converter.get().apply(backingMap.get(key)));
} }
@ -188,8 +197,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
// If we started this operation with an instance of this type, use values from // If we started this operation with an instance of this type, use values from
// that. // that.
if (pojo != null) { if (pojo != null) {
backingMap.put(key, backingMap.put(
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable)); key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
} else { } else {
// Otherwise we'll use default values for the property type if available. // Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType(); Class<?> propType = prop.getJavaType();
@ -207,9 +216,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
// Lastly, create a new proxy object for the entity and return the new instance. // Lastly, create a new proxy object for the entity and return the new instance.
return (T) Helenus.map(iface, backingMap); return (T) Helenus.map(iface, backingMap);
} }
// Oddly, this insert didn't change any value so simply return the pojo. // Oddly, this insert didn't change anything so simply return the pojo.
// TODO(gburd): this pojo is the result of a Draft.build() call which will not
// preserve object identity (o1 == o2), ... fix me.
return (T) pojo; return (T) pojo;
} }
return (T) resultSet; return (T) resultSet;
@ -231,8 +238,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface() throw new HelenusMappingException(
+ " or " + p.getEntity().getMappingInterface()); "you can insert only single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
@ -251,6 +261,15 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return sync(); return sync();
} }
T result = super.sync(uow); T result = super.sync(uow);
if (result != null && pojo != null && !(pojo == result) && pojo.equals(result)) {
// To preserve object identity we need to find this object in cache
// because it was unchanged by the INSERT but pojo in this case was
// the result of a draft.build().
T cachedValue = (T) uow.cacheLookup(bindFacetValues());
if (cachedValue != null) {
result = cachedValue;
}
}
Class<?> iface = entity.getMappingInterface(); Class<?> iface = entity.getMappingInterface();
if (resultType == iface) { if (resultType == iface) {
cacheUpdate(uow, result, entity.getFacets()); cacheUpdate(uow, result, entity.getFacets());
@ -262,6 +281,36 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return result; return result;
} }
@Override
public List<Facet> bindFacetValues() {
List<Facet> facets = getFacets();
if (facets == null || facets.size() == 0) {
return new ArrayList<Facet>();
}
List<Facet> boundFacets = new ArrayList<>();
Map<HelenusProperty, Object> valuesMap = new HashMap<>(values.size());
values.forEach(t -> valuesMap.put(t._1.getProperty(), t._2));
for (Facet facet : facets) {
if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
for (HelenusProperty prop : unboundFacet.getProperties()) {
Object value = valuesMap.get(prop);
if (value != null) {
binder.setValueForProperty(prop, value.toString());
}
}
if (binder.isBound()) {
boundFacets.add(binder.bind());
}
} else {
boundFacets.add(facet);
}
}
return boundFacets;
}
@Override @Override
public List<Facet> getFacets() { public List<Facet> getFacets() {
if (entity != null) { if (entity != null) {
@ -270,5 +319,4 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return new ArrayList<Facet>(); return new ArrayList<Facet>();
} }
} }
} }
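
With the new wasApplied() guard in transform(...), a failed ifNotExists insert now surfaces as an exception instead of a silently unapplied row. A hedged DSL sketch (the insert(...)/ifNotExists() spelling is assumed from this class; widget and session are stand-ins):

    try {
      session.insert(widget).ifNotExists().sync();  // sync() may also throw TimeoutException
    } catch (HelenusException e) {
      // wasApplied() was false: an existing row won the race
    }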

@ -15,30 +15,27 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;
import java.net.InetAddress;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class Operation<E> { public abstract class Operation<E> {
@ -83,13 +80,19 @@ public abstract class Operation<E> {
query = regularStatement.getQueryString(); query = regularStatement.getQueryString();
} else { } else {
query = statement.toString(); query = statement.toString();
} }
return query; return query;
} }
public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout, public ResultSet execute(
TimeUnit units, boolean showValues, boolean cached) throws TimeoutException { AbstractSessionOperations session,
UnitOfWork uow,
TraceContext traceContext,
long timeout,
TimeUnit units,
boolean showValues,
boolean cached)
throws TimeoutException {
// Start recording, in a Zipkin sub-span, the time spent performing this // Start recording, in a Zipkin sub-span, the time spent performing this
// operation. // operation.
@ -110,15 +113,49 @@ public abstract class Operation<E> {
Stopwatch timer = Stopwatch.createStarted(); Stopwatch timer = Stopwatch.createStarted();
try { try {
ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues); ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues);
if (uow != null) if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1);
uow.recordCacheAndDatabaseOperationCount(0, 1);
ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units); ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
if (LOG.isDebugEnabled()) {
ExecutionInfo ei = resultSet.getExecutionInfo();
Host qh = ei.getQueriedHost();
String oh =
ei.getTriedHosts()
.stream()
.map(Host::getAddress)
.map(InetAddress::toString)
.collect(Collectors.joining(", "));
ConsistencyLevel cl = ei.getAchievedConsistencyLevel();
int se = ei.getSpeculativeExecutions();
String warn = ei.getWarnings().stream().collect(Collectors.joining(", "));
String ri =
String.format(
"%s %s %s %s %s %s%sspec-retries: %d",
"server v" + qh.getCassandraVersion(),
qh.getAddress().toString(),
(oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "",
qh.getDatacenter(),
qh.getRack(),
(cl != null)
? (" consistency: "
+ cl.name()
+ (cl.isDCLocal() ? " DC " : "")
+ (cl.isSerial() ? " SC " : ""))
: "",
(warn != null && !warn.equals("")) ? ": " + warn : "",
se);
if (uow != null) uow.setInfo(ri);
else LOG.debug(ri);
}
if (!resultSet.wasApplied()
&& !(columnDefinitions.size() > 1 || !columnDefinitions.contains("[applied]"))) {
throw new HelenusException("Operation Failed");
}
return resultSet; return resultSet;
} finally { } finally {
timer.stop(); timer.stop();
if (uow != null) if (uow != null) uow.addDatabaseTime("Cassandra", timer);
uow.addDatabaseTime("Cassandra", timer);
log(statement, uow, timer, showValues); log(statement, uow, timer, showValues);
} }
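
Background for the "[applied]" check above: Cassandra reports the outcome of a lightweight transaction as a single boolean column named "[applied]", which the driver exposes as wasApplied(). Plain driver usage, independent of Helenus:

    ResultSet rs = session.execute("INSERT INTO t (k) VALUES (1) IF NOT EXISTS");
    boolean applied = rs.wasApplied();  // false when the IF condition rejected the write
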
@ -140,7 +177,8 @@ public abstract class Operation<E> {
if (timer != null) { if (timer != null) {
timerString = String.format(" %s ", timer.toString()); timerString = String.format(" %s ", timer.toString());
} }
LOG.info(String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false))); LOG.info(
String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false)));
} }
} }
@ -163,5 +201,4 @@ public abstract class Operation<E> {
public boolean isSessionCacheable() { public boolean isSessionCacheable() {
return false; return false;
} }
} }

@ -43,5 +43,4 @@ public final class PreparedOperation<E> {
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }

@ -23,7 +23,8 @@ public final class PreparedOptionalOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractOptionalOperation<E, ?> operation; private final AbstractOptionalOperation<E, ?> operation;
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) { public PreparedOptionalOperation(
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
this.preparedStatement = statement; this.preparedStatement = statement;
this.operation = operation; this.operation = operation;
} }

@ -23,7 +23,8 @@ public final class PreparedStreamOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractStreamOperation<E, ?> operation; private final AbstractStreamOperation<E, ?> operation;
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) { public PreparedStreamOperation(
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
this.preparedStatement = statement; this.preparedStatement = statement;
this.operation = operation; this.operation = operation;
} }

@ -15,16 +15,15 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> { public final class SelectFirstOperation<E>
extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
private final SelectOperation<E> delegate; private final SelectOperation<E> delegate;

@ -15,18 +15,15 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public final class SelectFirstTransformingOperation<R, E> public final class SelectFirstTransformingOperation<R, E>
extends extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
private final SelectOperation<E> delegate; private final SelectOperation<E> delegate;
private final Function<E, R> fn; private final Function<E, R> fn;

@@ -15,14 +15,6 @@
 */
package net.helenus.core.operation;

-import java.util.*;
-import java.util.function.Function;
-import java.util.stream.Stream;
-import java.util.stream.StreamSupport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement;
@@ -32,7 +24,10 @@ import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.collect.Iterables;
+import java.util.*;
+import java.util.function.Function;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
import net.helenus.core.*;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
@@ -45,6 +40,8 @@ import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.Fun;
import net.helenus.support.HelenusMappingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
@@ -62,7 +59,8 @@
  public SelectOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);

-    this.rowMapper = new Function<Row, E>() {
+    this.rowMapper =
+        new Function<Row, E>() {

          @Override
          public E apply(Row source) {
@@ -85,25 +83,35 @@
    super(sessionOperations);

-    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
+    entity
+        .getOrderedProperties()
+        .stream()
+        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
  }

-  public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
+  public SelectOperation(
+      AbstractSessionOperations sessionOperations,
+      HelenusEntity entity,
      Function<Row, E> rowMapper) {

    super(sessionOperations);
    this.rowMapper = rowMapper;

-    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
+    entity
+        .getOrderedProperties()
+        .stream()
+        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
  }

-  public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
+  public SelectOperation(
+      AbstractSessionOperations sessionOperations,
+      Function<Row, E> rowMapper,
      HelenusPropertyNode... props) {

    super(sessionOperations);
@@ -120,8 +128,11 @@
      if (entity == null) {
        entity = prop.getEntity();
      } else if (entity != prop.getEntity()) {
-        throw new HelenusMappingException("you can count records only from a single entity "
-            + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
+        throw new HelenusMappingException(
+            "you can count records only from a single entity "
+                + entity.getMappingInterface()
+                + " or "
+                + prop.getEntity().getMappingInterface());
      }
    }
@@ -133,7 +144,10 @@
    HelenusEntity entity = Helenus.entity(materializedViewClass);
    this.alternateTableName = entity.getName().toCql();
    this.props.clear();
-    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
+    entity
+        .getOrderedProperties()
+        .stream()
+        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));
    return this;
  }
@@ -151,7 +165,9 @@
    this.rowMapper = null;

-    return new SelectTransformingOperation<R, E>(this, (r) -> {
+    return new SelectTransformingOperation<R, E>(
+        this,
+        (r) -> {
          Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
          return (R) Helenus.map(entityClass, map);
        });
@@ -217,7 +233,6 @@
          }
        }
      }
-    }
    if (binder.isBound()) {
      boundFacets.add(binder.bind());
@@ -239,15 +254,14 @@
      String columnName = prop.getColumnName();
      selection = selection.column(columnName);

-      if (prop.getProperty().caseSensitiveIndex()) {
-        allowFiltering = true;
-      }
-
      if (entity == null) {
        entity = prop.getEntity();
      } else if (entity != prop.getEntity()) {
-        throw new HelenusMappingException("you can select columns only from a single entity "
-            + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
+        throw new HelenusMappingException(
+            "you can select columns only from a single entity "
+                + entity.getMappingInterface()
+                + " or "
+                + prop.getEntity().getMappingInterface());
      }

      // TODO(gburd): writeTime and ttl will be useful on merge() but cause object
@@ -291,8 +305,28 @@
    Where where = select.where();

+    boolean isFirstIndex = true;
    for (Filter<?> filter : filters.values()) {
      where.and(filter.getClause(sessionOps.getValuePreparer()));
+      HelenusProperty prop = filter.getNode().getProperty();
+      if (allowFiltering == false) {
+        switch (prop.getColumnType()) {
+          case PARTITION_KEY:
+          case CLUSTERING_COLUMN:
+            break;
+          default:
+            // When using non-Cassandra-standard 2i types or when using more than one
+            // indexed column or non-indexed columns the query must include ALLOW FILTERING.
+            if (prop.caseSensitiveIndex()) {
+              allowFiltering = true;
+            } else if (prop.getIndexName() != null) {
+              allowFiltering |= !isFirstIndex;
+              isFirstIndex = false;
+            } else {
+              allowFiltering = true;
+            }
+        }
+      }
    }
  }
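
The new loop above decides when the generated SELECT must carry ALLOW FILTERING: filters on primary-key columns never need it, a case-sensitive (SASI-style) index or an unindexed column always needs it, and a second indexed column in the same query forces it as well. A minimal sketch of that rule in isolation; ColumnKind and FilterColumn here are hypothetical stand-ins for Helenus' ColumnType and HelenusProperty:

    enum ColumnKind { PARTITION_KEY, CLUSTERING_COLUMN, REGULAR }

    final class FilterColumn {
      final ColumnKind kind;
      final boolean caseSensitiveIndex; // non-standard (e.g. SASI) secondary index
      final String indexName;           // null when the column is not indexed

      FilterColumn(ColumnKind kind, boolean caseSensitiveIndex, String indexName) {
        this.kind = kind;
        this.caseSensitiveIndex = caseSensitiveIndex;
        this.indexName = indexName;
      }
    }

    final class AllowFilteringRule {
      // True when the WHERE clause must carry ALLOW FILTERING: any case-sensitive
      // index, any non-indexed regular column, or more than one indexed column.
      static boolean needsAllowFiltering(java.util.List<FilterColumn> filters) {
        boolean allowFiltering = false;
        boolean isFirstIndex = true;
        for (FilterColumn col : filters) {
          if (col.kind == ColumnKind.PARTITION_KEY || col.kind == ColumnKind.CLUSTERING_COLUMN) {
            continue; // primary-key columns never require it
          }
          if (col.caseSensitiveIndex) {
            allowFiltering = true;
          } else if (col.indexName != null) {
            allowFiltering |= !isFirstIndex; // second indexed column trips the flag
            isFirstIndex = false;
          } else {
            allowFiltering = true; // unindexed regular column
          }
        }
        return allowFiltering;
      }
    }
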
@@ -311,12 +345,14 @@
  @Override
  public Stream<E> transform(ResultSet resultSet) {
    if (rowMapper != null) {
-      return StreamSupport
-          .stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
+      return StreamSupport.stream(
+              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
          .map(rowMapper);
    } else {
-      return (Stream<E>) StreamSupport
-          .stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
+      return (Stream<E>)
+          StreamSupport.stream(
+              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
+              false);
    }
  }
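
transform() adapts the driver's row iterator into a java.util.Stream without copying. The same adaptation in isolation, a sketch using only java.util so any Iterator-backed source works:

    import java.util.Iterator;
    import java.util.Spliterator;
    import java.util.Spliterators;
    import java.util.stream.Stream;
    import java.util.stream.StreamSupport;

    final class Streams {
      // Wrap an iterator of unknown size into an ordered, sequential Stream,
      // mirroring how SelectOperation.transform() treats a ResultSet.
      static <T> Stream<T> fromIterator(Iterator<T> it) {
        return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
      }
    }
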

View file

@ -15,18 +15,15 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List; import java.util.List;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Stream; import java.util.stream.Stream;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public final class SelectTransformingOperation<R, E> public final class SelectTransformingOperation<R, E>
extends extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
private final SelectOperation<E> delegate; private final SelectOperation<E> delegate;
private final Function<E, R> fn; private final Function<E, R> fn;

View file

@@ -15,17 +15,15 @@
 */
package net.helenus.core.operation;

-import java.util.*;
-import java.util.concurrent.TimeoutException;
-import java.util.function.Function;
-import java.util.stream.Collectors;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update;
+import java.util.*;
+import java.util.concurrent.TimeoutException;
+import java.util.function.Function;
+import java.util.stream.Collectors;
import net.helenus.core.*;
import net.helenus.core.cache.BoundFacet;
import net.helenus.core.cache.Facet;
@@ -36,6 +34,7 @@ import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
+import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
import net.helenus.support.Immutables;
@@ -55,7 +54,8 @@
    this.draftMap = null;
  }

-  public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
+  public UpdateOperation(
+      AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
    super(sessionOperations);
    this.draft = draft;
    this.draftMap = draft.toMap();
@@ -69,7 +69,8 @@
    this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
  }

-  public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
+  public UpdateOperation(
+      AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
    super(sessionOperations);
    this.draft = null;
    this.draftMap = null;
@@ -91,8 +92,8 @@
    if (draft != null) {
      String key = prop.getPropertyName();
-      if (draft.get(key, value.getClass()) != value) {
-        draft.set(key, value);
+      if (draft.get(key, value.getClass()) != v) {
+        draft.set(key, v);
      }
    }
@@ -101,8 +102,8 @@
      String key = prop.getPropertyName();
      Map<String, Object> map = ((MapExportable) pojo).toMap();
      if (!(map instanceof ValueProviderMap)) {
-        if (map.get(key) != value) {
-          map.put(key, value);
+        if (map.get(key) != v) {
+          map.put(key, v);
        }
      }
    }
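
The fix above compares and stores the incoming argument v rather than the stale value reference, so the draft and the exported map stay consistent with the assignment actually being built. Reduced to its essence (Draft is a hypothetical stand-in for AbstractEntityDraft):

    final class Draft {
      private final java.util.Map<String, Object> values = new java.util.HashMap<>();

      // Identity comparison is intentional here: a new value object must replace
      // the cached entry even when equals() would consider the two equal.
      void set(String key, Object v) {
        if (values.get(key) != v) {
          values.put(key, v);
        }
      }
    }
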
@@ -193,7 +194,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.add(0, value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
@@ -220,7 +223,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.addAll(0, value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null && value.size() > 0) {
@@ -249,7 +254,9 @@
    List<V> list;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
-      list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
    } else {
      String key = p.getProperty().getPropertyName();
      list = (List<V>) draftMap.get(key);
@@ -283,7 +290,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.add(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
@@ -309,7 +318,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.addAll(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null && value.size() > 0) {
@@ -335,7 +346,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.remove(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
@@ -361,7 +374,9 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      List<V> list = new ArrayList<V>((List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      List<V> list =
+          new ArrayList<V>(
+              (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      list.removeAll(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
@@ -381,7 +396,8 @@
    Object valueObj = value;

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      List convertedList = (List) converter.get().apply(Immutables.listOf(value));
      valueObj = convertedList.get(0);
@@ -396,7 +412,8 @@
    List valueObj = value;

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (List) converter.get().apply(value);
    }
@@ -423,7 +440,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      Set<V> set =
+          new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      set.add(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
@@ -449,7 +467,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      Set<V> set =
+          new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      set.addAll(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
@@ -475,7 +494,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      Set<V> set =
+          new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      set.remove(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
@@ -501,7 +521,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
      HelenusProperty prop = p.getProperty();
-      Set<V> set = new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
+      Set<V> set =
+          new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      set.removeAll(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
@@ -521,7 +542,8 @@
    HelenusProperty prop = p.getProperty();
    Object valueObj = value;

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
      valueObj = convertedSet.iterator().next();
@@ -535,7 +557,8 @@
    HelenusProperty prop = p.getProperty();
    Set valueObj = value;

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (Set) converter.get().apply(value);
    }
@@ -561,7 +584,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
-      Map<K, V> map = new HashMap<K, V>(
+      Map<K, V> map =
+          new HashMap<K, V>(
              (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      map.put(key, value);
      facet = new BoundFacet(prop, map);
@@ -569,10 +593,11 @@
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
    }

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
-      Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
-          .apply(Immutables.mapOf(key, value));
+      Map<Object, Object> convertedMap =
+          (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
      for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
        assignments.put(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()), facet);
      }
@@ -595,7 +620,8 @@
    BoundFacet facet = null;
    if (pojo != null) {
-      Map<K, V> newMap = new HashMap<K, V>(
+      Map<K, V> newMap =
+          new HashMap<K, V>(
              (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
      newMap.putAll(map);
      facet = new BoundFacet(prop, newMap);
@@ -603,7 +629,8 @@
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
    }

-    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
+    Optional<Function<Object, Object>> converter =
+        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map convertedMap = (Map) converter.get().apply(map);
      assignments.put(QueryBuilder.putAll(p.getColumnName(), convertedMap), facet);
@@ -656,6 +683,10 @@
  @Override
  public E transform(ResultSet resultSet) {
+    if ((ifFilters != null && !ifFilters.isEmpty()) && (resultSet.wasApplied() == false)) {
+      throw new HelenusException("Statement was not applied due to consistency constraints");
+    }
+
    if (draft != null) {
      return Helenus.map(draft.getEntityClass(), draft.toMap(draftMap));
    } else {
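
The added guard surfaces failed lightweight transactions: when an UPDATE carries IF conditions, Cassandra reports failure through the [applied] result column rather than by throwing. A minimal sketch of the same check against the DataStax 3.x driver; the table and column names are invented:

    import com.datastax.driver.core.ResultSet;
    import com.datastax.driver.core.Session;
    import com.datastax.driver.core.querybuilder.QueryBuilder;
    import com.datastax.driver.core.querybuilder.Update;

    final class LwtCheck {
      // Execute a conditional UPDATE and convert "not applied" into an exception,
      // as UpdateOperation.transform() now does with HelenusException.
      static void rename(Session session, java.util.UUID id, String from, String to) {
        Update.Where stmt =
            QueryBuilder.update("users")                 // hypothetical table
                .with(QueryBuilder.set("name", to))      // hypothetical column
                .where(QueryBuilder.eq("id", id));
        ResultSet rs = session.execute(stmt.onlyIf(QueryBuilder.eq("name", from)));
        if (!rs.wasApplied()) {
          throw new IllegalStateException(
              "Statement was not applied due to consistency constraints");
        }
      }
    }
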
@@ -679,8 +710,11 @@
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
-      throw new HelenusMappingException("you can update columns only in single entity "
-          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
+      throw new HelenusMappingException(
+          "you can update columns only in single entity "
+              + entity.getMappingInterface()
+              + " or "
+              + p.getEntity().getMappingInterface());
    }
  }
@@ -709,6 +743,7 @@
        cacheUpdate(uow, result, bindFacetValues());
      } else if (pojo != null) {
        cacheUpdate(uow, (E) pojo, bindFacetValues());
+        return (E) pojo;
      }
      return result;
    }
@@ -716,7 +751,13 @@
  @Override
  public List<Facet> bindFacetValues() {
    List<Facet> facets = bindFacetValues(entity.getFacets());
-    facets.addAll(assignments.values().stream().distinct().filter(o -> o != null).collect(Collectors.toList()));
+    facets.addAll(
+        assignments
+            .values()
+            .stream()
+            .distinct()
+            .filter(o -> o != null)
+            .collect(Collectors.toList()));
    return facets;
  }
@@ -728,5 +769,4 @@
      return new ArrayList<Facet>();
    }
  }
-
}

View file

@@ -19,10 +19,17 @@ import java.util.HashMap;
import java.util.Map;

public enum DefaultPrimitiveTypes {
-  BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class,
-      (short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);
+  BOOLEAN(boolean.class, false),
+  BYTE(byte.class, (byte) 0x0),
+  CHAR(char.class, (char) 0x0),
+  SHORT(short.class, (short) 0),
+  INT(int.class, 0),
+  LONG(long.class, 0L),
+  FLOAT(float.class, 0.0f),
+  DOUBLE(double.class, 0.0);

-  private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();
+  private static final Map<Class<?>, DefaultPrimitiveTypes> map =
+      new HashMap<Class<?>, DefaultPrimitiveTypes>();

  static {
    for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
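
This enum exists for the proxy layer: a mapped getter with a primitive return type cannot return null, so the handler substitutes the primitive's zero value. A hedged usage sketch, assuming the enum exposes lookup(Class) and getDefaultValue() accessors over the map built above:

    // Illustration only: pick the value a proxy should return from a primitive
    // getter that has no backing column value.
    static Object defaultReturnValue(Class<?> returnType) {
      if (!returnType.isPrimitive()) {
        return null; // reference types can simply be null
      }
      DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
      if (type == null) {
        throw new IllegalStateException("unknown primitive type " + returnType);
      }
      return type.getDefaultValue(); // e.g. 0 for int.class, false for boolean.class
    }
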

View file

@@ -16,7 +16,6 @@
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;
-
import net.helenus.mapping.HelenusEntity;

public interface DslExportable {

View file

@@ -15,6 +15,7 @@
 */
package net.helenus.core.reflect;

+import com.datastax.driver.core.*;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
@@ -22,9 +23,6 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
-
-import com.datastax.driver.core.*;
-
import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;
@@ -46,7 +44,10 @@
  private HelenusEntity entity = null;
  private Metadata metadata = null;

-  public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
+  public DslInvocationHandler(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
      Metadata metadata) {

    this.metadata = metadata;
@@ -75,8 +76,12 @@
      if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {

-        Object childDsl = Helenus.dsl(javaType, classLoader,
-            Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
+        Object childDsl =
+            Helenus.dsl(
+                javaType,
+                classLoader,
+                Optional.of(new HelenusPropertyNode(prop, parent)),
+                metadata);

        udtMap.put(prop.getGetterMethod(), childDsl);
      }
@@ -84,10 +89,15 @@
      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;

-        if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
+        if (dataType.getDataType() instanceof TupleType
+            && !TupleValue.class.isAssignableFrom(javaType)) {

-          Object childDsl = Helenus.dsl(javaType, classLoader,
-              Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
+          Object childDsl =
+              Helenus.dsl(
+                  javaType,
+                  classLoader,
+                  Optional.of(new HelenusPropertyNode(prop, parent)),
+                  metadata);

          tupleMap.put(prop.getGetterMethod(), childDsl);
        }
@@ -115,7 +125,9 @@
      return false;
    }

-    if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
+    if (DslExportable.SET_METADATA_METHOD.equals(methodName)
+        && args.length == 1
+        && args[0] instanceof Metadata) {
      if (metadata == null) {
        this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
      }
    }

View file

@@ -19,9 +19,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;
-
import javax.validation.ConstraintValidator;
-
import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;

View file

@@ -17,7 +17,6 @@ package net.helenus.core.reflect;
import java.util.*;
import java.util.stream.Collectors;
-
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;

View file

@@ -16,7 +16,6 @@
package net.helenus.core.reflect;

import java.util.*;
-
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

View file

@@ -19,7 +19,6 @@ import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
-
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

View file

@@ -27,7 +27,6 @@ import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
-
import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap;
@@ -52,21 +51,26 @@
    // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/

    // First, we need an instance of a private inner-class found in MethodHandles.
-    Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
-        int.class);
+    Constructor<MethodHandles.Lookup> constructor =
+        MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
    constructor.setAccessible(true);

    // Now we need to lookup and invoke special the default method on the interface
    // class.
    final Class<?> declaringClass = method.getDeclaringClass();
-    Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
-        .unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
+    Object result =
+        constructor
+            .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
+            .unreflectSpecial(method, declaringClass)
+            .bindTo(proxy)
+            .invokeWithArguments(args);
    return result;
  }

  private Object writeReplace() {
    return new SerializationProxy<E>(this);
  }

  private void readObject(ObjectInputStream stream) throws InvalidObjectException {
    throw new InvalidObjectException("Proxy required.");
  }
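
The private-Lookup trick above is what lets a JDK proxy delegate to an interface's default method without bouncing back into its own invoke(). A self-contained sketch of the technique as it works on Java 8 (later JDKs would use MethodHandles.privateLookupIn instead); Greeter is an invented interface:

    import java.lang.invoke.MethodHandles;
    import java.lang.reflect.Constructor;
    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Proxy;

    interface Greeter {
      default String greet(String name) {
        return "hello, " + name;
      }
    }

    final class DefaultMethodProxy {
      public static void main(String[] args) throws Throwable {
        InvocationHandler handler =
            (proxy, method, methodArgs) -> {
              if (method.isDefault()) {
                // The private Lookup constructor grants invokespecial access on the
                // declaring interface, bypassing the proxy's own dispatch.
                Constructor<MethodHandles.Lookup> ctor =
                    MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
                ctor.setAccessible(true);
                Class<?> declaringClass = method.getDeclaringClass();
                return ctor
                    .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
                    .unreflectSpecial(method, declaringClass)
                    .bindTo(proxy)
                    .invokeWithArguments(methodArgs);
              }
              throw new UnsupportedOperationException(method.getName());
            };
        Greeter g =
            (Greeter)
                Proxy.newProxyInstance(
                    Greeter.class.getClassLoader(), new Class[] {Greeter.class}, handler);
        System.out.println(g.greet("world")); // prints "hello, world"
      }
    }
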
@@ -94,6 +98,9 @@
    if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
      return true;
    }
+    if (src instanceof MapExportable && otherObj.equals(((MapExportable) src).toMap())) {
+      return true;
+    }
    return false;
  }
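
The added branch restores symmetry: equality now holds no matter which side of the comparison exposes the exported map. Schematically, with MapExportable as in the surrounding file:

    // Sketch: compare exported key/value views, whichever side is exportable.
    static boolean exportableEquals(Object a, Object b) {
      Object left = (a instanceof MapExportable) ? ((MapExportable) a).toMap() : a;
      Object right = (b instanceof MapExportable) ? ((MapExportable) b).toMap() : b;
      return left.equals(right);
    }
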
@@ -122,7 +129,7 @@
    }

    if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
-      return src; // return Collections.unmodifiableMap(src);
+      return src; // Collections.unmodifiableMap(src);
    }

    Object value = src.get(methodName);
@@ -176,6 +183,5 @@
    Object readResolve() throws ObjectStreamException {
      return new MapperInvocationHandler(iface, src);
    }
-
  }
}

View file

@@ -15,11 +15,9 @@
 */
package net.helenus.core.reflect;

+import com.datastax.driver.core.Metadata;
import java.lang.reflect.Proxy;
import java.util.Optional;
-
-import com.datastax.driver.core.Metadata;
-
import net.helenus.core.DslInstantiator;

public enum ReflectionDslInstantiator implements DslInstantiator {
@@ -27,10 +25,15 @@
  @Override
  @SuppressWarnings("unchecked")
-  public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
+  public <E> E instantiate(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
      Metadata metadata) {
-    DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
-    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
+    DslInvocationHandler<E> handler =
+        new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
+    E proxy =
+        (E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
    return proxy;
  }
}
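
Both reflection instantiators share one shape: build an InvocationHandler, then ask Proxy.newProxyInstance for an implementation of the mapped interface plus a marker interface. A generic reduction (ProxyFactory and the marker parameter are placeholders, not Helenus API):

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Proxy;

    final class ProxyFactory {
      // Create a proxy implementing the user-facing interface plus one marker
      // interface (DslExportable above, MapExportable in the mapper variant).
      @SuppressWarnings("unchecked")
      static <E> E create(Class<E> iface, Class<?> marker, ClassLoader cl, InvocationHandler h) {
        return (E) Proxy.newProxyInstance(cl, new Class<?>[] {iface, marker}, h);
      }
    }
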

View file

@@ -19,8 +19,7 @@ import net.helenus.support.HelenusMappingException;

public final class ReflectionInstantiator {

-  private ReflectionInstantiator() {
-  }
+  private ReflectionInstantiator() {}

  public static <T> T instantiateClass(Class<T> clazz) {

View file

@@ -18,7 +18,6 @@ package net.helenus.core.reflect;
import java.io.Serializable;
import java.lang.reflect.Proxy;
import java.util.Map;
-
import net.helenus.core.MapperInstantiator;

public enum ReflectionMapperInstantiator implements MapperInstantiator {
@@ -29,8 +28,10 @@
  public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {

    MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
-    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class, Serializable.class},
-        handler);
+    E proxy =
+        (E)
+            Proxy.newProxyInstance(
+                classLoader, new Class[] {iface, MapExportable.class, Serializable.class}, handler);
    return proxy;
  }
}
} }

View file

@@ -18,7 +18,6 @@ package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
-
import net.helenus.support.HelenusMappingException;

public final class SetDsl<V> implements Set<V> {

View file

@@ -16,7 +16,6 @@
package net.helenus.mapping;

import java.lang.reflect.Method;
-
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
@@ -103,13 +102,21 @@
  private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {

    if (columnTypeLocal != ColumnType.COLUMN) {
-      throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
+      throw new HelenusMappingException(
+          "property can be annotated only by a single column type " + getter);
    }
  }

  @Override
  public String toString() {
-    return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
-        + ", ordering=" + ordering + "]";
+    return "ColumnInformation [columnName="
+        + columnName
+        + ", columnType="
+        + columnType
+        + ", ordinal="
+        + ordinal
+        + ", ordering="
+        + ordering
+        + "]";
  }
}
} }

View file

@@ -16,5 +16,8 @@
package net.helenus.mapping;

public enum ColumnType {
-  PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;
+  PARTITION_KEY,
+  CLUSTERING_COLUMN,
+  STATIC_COLUMN,
+  COLUMN;
}

View file

@@ -17,7 +17,6 @@ package net.helenus.mapping;
import java.util.Collection;
import java.util.List;
-
import net.helenus.core.cache.Facet;

public interface HelenusEntity {

View file

@@ -16,5 +16,8 @@
package net.helenus.mapping;

public enum HelenusEntityType {
-  TABLE, VIEW, TUPLE, UDT;
+  TABLE,
+  VIEW,
+  TUPLE,
+  UDT;
}

View file

@@ -15,18 +15,13 @@
 */
package net.helenus.mapping;

-import java.lang.reflect.Method;
-import java.util.*;
-
-import javax.validation.ConstraintValidator;
-
-import org.apache.commons.lang3.ClassUtils;
-
import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
+import java.lang.reflect.Method;
+import java.util.*;
+import javax.validation.ConstraintValidator;
import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
@@ -35,6 +30,8 @@ import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*;
import net.helenus.mapping.validator.DistinctValidator;
import net.helenus.support.HelenusMappingException;
+import org.apache.commons.lang3.ClassUtils;

public final class HelenusMappingEntity implements HelenusEntity {
@@ -69,7 +65,8 @@
    }

    for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
-      if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) {
+      if (c.getDeclaredAnnotation(Table.class) != null
+          || c.getDeclaredAnnotation(InheritedTable.class) != null) {
        for (Method m : c.getDeclaredMethods()) {
          Method o = methods.get(m.getName());
          if (o != null) {
@@ -128,7 +125,8 @@
          facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
          primaryKeyProperties = null;
        }
-        for (ConstraintValidator<?, ?> constraint : MappingUtil.getValidators(prop.getGetterMethod())) {
+        for (ConstraintValidator<?, ?> constraint :
+            MappingUtil.getValidators(prop.getGetterMethod())) {
          if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) {
            UnboundFacet facet = new UnboundFacet(prop);
            facetsBuilder.add(facet);
@@ -176,7 +174,8 @@
      return HelenusEntityType.UDT;
    }

-    throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
+    throw new HelenusMappingException(
+        "entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
  }

  @Override
@@ -250,7 +249,9 @@
        case PARTITION_KEY:
          if (partitionKeys.get(ordinal)) {
            throw new HelenusMappingException(
-                "detected two or more partition key columns with the same ordinal " + ordinal + " in "
+                "detected two or more partition key columns with the same ordinal "
+                    + ordinal
+                    + " in "
                    + prop.getEntity());
          }
          partitionKeys.set(ordinal);
@@ -258,8 +259,11 @@
        case CLUSTERING_COLUMN:
          if (clusteringColumns.get(ordinal)) {
-            throw new HelenusMappingException("detected two or clustering columns with the same ordinal "
-                + ordinal + " in " + prop.getEntity());
+            throw new HelenusMappingException(
+                "detected two or clustering columns with the same ordinal "
+                    + ordinal
+                    + " in "
+                    + prop.getEntity());
          }
          clusteringColumns.set(ordinal);
          break;
@@ -273,17 +277,27 @@
  private void validateOrdinalsInTuple() {
    boolean[] ordinals = new boolean[props.size()];

-    getOrderedProperties().forEach(p -> {
+    getOrderedProperties()
+        .forEach(
+            p -> {
              int ordinal = p.getOrdinal();

              if (ordinal < 0 || ordinal >= ordinals.length) {
-                throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
-                    + p.getPropertyName() + " in " + p.getEntity());
+                throw new HelenusMappingException(
+                    "invalid ordinal "
+                        + ordinal
+                        + " found for property "
+                        + p.getPropertyName()
+                        + " in "
+                        + p.getEntity());
              }

              if (ordinals[ordinal]) {
                throw new HelenusMappingException(
-                    "detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
+                    "detected two or more properties with the same ordinal "
+                        + ordinal
+                        + " in "
+                        + p.getEntity());
              }

              ordinals[ordinal] = true;
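
validateOrdinalsInTuple() enforces that tuple properties declare a dense, collision-free ordinal sequence, since ordinals map one-to-one onto tuple positions. The check in isolation:

    final class OrdinalCheck {
      // Every ordinal must fall in [0, n) and be used exactly once.
      static void validate(int[] ordinals) {
        boolean[] seen = new boolean[ordinals.length];
        for (int ordinal : ordinals) {
          if (ordinal < 0 || ordinal >= seen.length) {
            throw new IllegalArgumentException("invalid ordinal " + ordinal);
          }
          if (seen[ordinal]) {
            throw new IllegalArgumentException(
                "detected two or more properties with the same ordinal " + ordinal);
          }
          seen[ordinal] = true;
        }
      }
    }
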
@@ -300,8 +314,12 @@
  public String toString() {

    StringBuilder str = new StringBuilder();
-    str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
-        .append(type.name().toLowerCase()).append(":\n");
+    str.append(iface.getSimpleName())
+        .append("(")
+        .append(name.getName())
+        .append(") ")
+        .append(type.name().toLowerCase())
+        .append(":\n");

    for (HelenusProperty prop : getOrderedProperties()) {
      str.append(prop.toString());

View file

@@ -15,16 +15,13 @@
 */
package net.helenus.mapping;

+import com.datastax.driver.core.Metadata;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Optional;
import java.util.function.Function;
import javax.validation.ConstraintValidator;
-
-import com.datastax.driver.core.Metadata;
-
import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes;
@@ -63,8 +60,9 @@
    this.javaType = getter.getReturnType();
    this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);

-    this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
-        this.columnInfo.getColumnType(), metadata);
+    this.dataType =
+        abstractJavaType.resolveDataType(
+            this.getter, this.genericJavaType, this.columnInfo.getColumnType(), metadata);

    this.validators = MappingUtil.getValidators(getter);
  }

View file

@@ -19,9 +19,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;
-
import javax.validation.ConstraintValidator;
-
import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType;

View file

@@ -21,10 +21,8 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-
import javax.validation.Constraint;
import javax.validation.ConstraintValidator;
-
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.*;
@@ -35,10 +33,10 @@ import net.helenus.support.HelenusMappingException;
public final class MappingUtil {

  @SuppressWarnings("unchecked")
-  public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0];
+  public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS =
+      new ConstraintValidator[0];

-  private MappingUtil() {
-  }
+  private MappingUtil() {}

  public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
@@ -63,8 +61,8 @@
    }
  }

-  private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation,
-      List<ConstraintValidator<? extends Annotation, ?>> list) {
+  private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(
+      Annotation constraintAnnotation, List<ConstraintValidator<? extends Annotation, ?>> list) {

    Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
@@ -76,8 +74,8 @@
      for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {

-        ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator
-            .instantiateClass(clazz);
+        ConstraintValidator<? extends Annotation, ?> validator =
+            ReflectionInstantiator.instantiateClass(clazz);

        ((ConstraintValidator) validator).initialize(constraintAnnotation);
@@ -109,7 +107,9 @@
      }
    }

-    return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty();
+    return indexName != null
+        ? Optional.of(new IdentityName(indexName, forceQuote))
+        : Optional.empty();
  }

  public static boolean caseSensitiveIndex(Method getterMethod) {

View file

@@ -22,7 +22,8 @@
  public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {

-    int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
+    int c =
+        Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());

    if (c == 0) {
      c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());

View file

@@ -19,44 +19,34 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-
import net.helenus.mapping.OrderingDirection;

/**
 * ClusteringColumn is the family column in legacy Cassandra API
 *
- * <p>
- * The purpose of this column is have additional dimension in the table.
- * Both @PartitionKey and @ClusteringColumn together are parts of the primary
- * key of the table. The primary difference between them is that the first one
- * is using for routing purposes in order to locate a data node in the cluster,
- * otherwise the second one is using inside the node to locate peace of data in
- * concrete machine.
+ * <p>The purpose of this column is have additional dimension in the table. Both @PartitionKey
+ * and @ClusteringColumn together are parts of the primary key of the table. The primary difference
+ * between them is that the first one is using for routing purposes in order to locate a data node
+ * in the cluster, otherwise the second one is using inside the node to locate a piece of data in
+ * concrete machine.
 *
- * <p>
- * ClusteringColumn can be represented as a Key in SortedMap that fully stored
- * in a single node. All developers must be careful for selecting fields for
- * clustering columns, because all data inside this SortedMap must fit in to one
- * node.
+ * <p>ClusteringColumn can be represented as a Key in SortedMap that fully stored in a single node.
+ * All developers must be careful for selecting fields for clustering columns, because all data
+ * inside this SortedMap must fit in to one node.
 *
- * <p>
- * ClusteringColumn can have more than one part and the order of parts is
- * important. This order defines the way how Cassandra joins the parts and
- * influence of data retrieval operations. Each part can have ordering property
- * that defines default ascending or descending order of data. In case of two
- * and more parts in select queries developer needs to have consisdent order of
- * all parts as they defined in table.
+ * <p>ClusteringColumn can have more than one part and the order of parts is important. This order
+ * defines the way how Cassandra joins the parts and influence of data retrieval operations. Each
+ * part can have ordering property that defines default ascending or descending order of data. In
+ * case of two and more parts in select queries developer needs to have consistent order of all
+ * parts as they defined in table.
 *
- * <p>
- * For example, first part is ASC ordering, second is also ASC, so Cassandra
- * will sort entries like this: a-a a-b b-a b-b In this case we are able run
- * queries: ORDER BY first ASC, second ASC ORDER BY first DESC, second DESC
- * WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second DESC WHERE
- * first=? AND second=?
+ * <p>For example, first part is ASC ordering, second is also ASC, so Cassandra will sort entries
+ * like this: a-a a-b b-a b-b In this case we are able run queries: ORDER BY first ASC, second ASC
+ * ORDER BY first DESC, second DESC WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second
+ * DESC WHERE first=? AND second=?
 *
- * <p>
- * But, we can not run queries: ORDER BY first DESC, second ASC ORDER BY first
- * ASC, second DESC WHERE second=? ORDER BY first (ASC,DESC)
+ * <p>But, we can not run queries: ORDER BY first DESC, second ASC ORDER BY first ASC, second DESC
+ * WHERE second=? ORDER BY first (ASC,DESC)
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -70,40 +60,35 @@ public @interface ClusteringColumn {
String value() default ""; String value() default "";
/** /**
* ClusteringColumn parts must be ordered in the @Table. It is the requirement * ClusteringColumn parts must be ordered in the @Table. It is the requirement of Cassandra.
* of Cassandra. Cassandra joins all parts to the final clustering key that is * Cassandra joins all parts to the final clustering key that is stored in column family name.
* stored in column family name. Additionally all parts can have some ordering * Additionally all parts can have some ordering (ASC, DESC) that with sequence of parts
* (ASC, DESC) that with sequence of parts determines key comparison function, * determines key comparison function, so Cassandra storing column family names always in sorted
* so Cassandra storing column family names always in sorted order. * order.
* *
* <p> * <p>Be default ordinal has 0 value, that's because in most cases @Table have single column for
* Be default ordinal has 0 value, that's because in most cases @Table have * ClusteringColumn If you have 2 and more parts of the ClusteringColumn, then you need to use
* single column for ClusteringColumn If you have 2 and more parts of the * ordinal() to define the sequence of the parts
* ClusteringColumn, then you need to use ordinal() to define the sequence of
* the parts
* *
* @return number that used to sort clustering columns * @return number that used to sort clustering columns
*/ */
int ordinal() default 0; int ordinal() default 0;
/** /**
* Default order of values in the ClusteringColumn This ordering is using for * Default order of values in the ClusteringColumn This ordering is using for comparison of the
* comparison of the clustering column values when Cassandra stores it in the * clustering column values when Cassandra stores it in the sorted order.
* sorted order.
* *
* <p> * <p>Default value is the ascending order
* Default value is the ascending order
* *
* @return ascending order or descending order of clustering column values * @return ascending order or descending order of clustering column values
*/ */
OrderingDirection ordering() default OrderingDirection.ASC; OrderingDirection ordering() default OrderingDirection.ASC;
/** /**
* For reserved words in Cassandra we need quotation in CQL queries. This * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
* property marks that the name of the UDT type needs to be quoted. * name of the UDT type needs to be quoted.
* *
* <p> * <p>Default value is false, we are quoting only selected names.
* Default value is false, we are quoting only selected names.
* *
* @return true if name have to be quoted * @return true if name have to be quoted
*/ */
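
As a rough usage sketch (not part of this changeset; the entity and field names are hypothetical), a partition key plus two ordered clustering columns might look like this:

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.*;

@Table
public interface Timeline {

  // Routing part of the primary key: locates the data node in the cluster.
  @PartitionKey
  UUID userId();

  // Clustering parts: order rows inside the partition, newest first.
  @ClusteringColumn(ordinal = 0, ordering = OrderingDirection.DESC)
  Date postedAt();

  @ClusteringColumn(ordinal = 1)
  UUID postId();

  String text();
}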

View file

@@ -18,18 +18,15 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * Column annotation is used to define additional properties of the column in the entity mapping
 * interfaces: @Table, @UDT, @Tuple.
 *
 * <p>Column annotation can be used to override the default name of the column or to set up the
 * order of the columns in the mapping.
 *
 * <p>Usually for @Table and @UDT types it is not important to define the order of the columns, but
 * in a @Tuple mapping it is required, because the tuple itself represents a sequence of types with
 * a particular order in the table's column.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@@ -46,21 +43,18 @@ public @interface Column {

  /**
   * Ordinal will be used for ascending sorting of columns.
   *
   * <p>Default value is 0, because not all mapping entities require all fields to have unique
   * ordinals; only the @Tuple mapping entity requires all of them to be unique.
   *
   * @return number used to sort columns, usually for @Tuple only
   */
  int ordinal() default 0;

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
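
For illustration only (hypothetical Book entity; that @Column's name override is given through its value() element is an assumption, mirroring the other annotations in this package):

import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Book {

  @PartitionKey
  UUID id();

  // Override the derived column name; ordinal is left at its default of 0.
  @Column("book_title")
  String title();
}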

View file

@@ -16,106 +16,83 @@
package net.helenus.mapping.annotation;

import java.lang.annotation.*;
import javax.validation.Constraint;
import net.helenus.mapping.validator.*;

/**
 * Constraint annotations are used for data integrity, mostly for @java.lang.String types. The
 * place of the annotation is the particular method in the model interface.
 *
 * <p>None of them has an effect on selects and data retrieval operations.
 *
 * <p>Supported types: - @NotNull supports any @java.lang.Object type - all other annotations
 * support the @java.lang.String type
 */
public final class Constraints {

  private Constraints() {}

  /**
   * NotNull annotation is used to check that a value is not null before storing it.
   *
   * <p>Applicable to any @java.lang.Object.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotNullValidator.class)
  public @interface NotNull {}

  /**
   * NotEmpty annotation is used to check that a value has text before storing it.
   *
   * <p>It also checks for null and is a stricter annotation than @NotNull.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotEmptyValidator.class)
  public @interface NotEmpty {}

  /**
   * Email annotation is used to check that a value is a valid email address before storing it.
   *
   * <p>Can be used only for @CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = EmailValidator.class)
  public @interface Email {}

  /**
   * Number annotation is used to check that all characters in a value are digits before storing
   * it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NumberValidator.class)
  public @interface Number {}

  /**
   * Alphabet annotation is used to check that all letters in a value are in a specific alphabet
   * before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -132,14 +109,11 @@ public final class Constraints {
  }

  /**
   * Length annotation is used to ensure that a value has an exact length before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -151,14 +125,12 @@ public final class Constraints {
  }

  /**
   * MaxLength annotation is used to ensure that a value's length is less than or equal to some
   * threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -170,14 +142,12 @@ public final class Constraints {
  }

  /**
   * MinLength annotation is used to ensure that a value's length is greater than or equal to some
   * threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -189,48 +159,38 @@ public final class Constraints {
  }

  /**
   * LowerCase annotation is used to ensure that a value is in lower case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LowerCaseValidator.class)
  public @interface LowerCase {}

  /**
   * UpperCase annotation is used to ensure that a value is in upper case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = UpperCaseValidator.class)
  public @interface UpperCase {}

  /**
   * Pattern annotation is used to ensure that a value matches a given pattern before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -254,14 +214,12 @@ public final class Constraints {
  }

  /**
   * Distinct annotation is used to signal, but not ensure, that a value should be distinct in the
   * database.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -275,6 +233,5 @@ public final class Constraints {
   * @return Java
   */
  Class<? extends Enum> value() default Enum.class;
  }
}
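
A minimal sketch of how these constraints might be stacked on a model interface (hypothetical Account entity; that MaxLength takes its threshold as the annotation value is an assumption, since the truncated hunk above does not show its elements):

import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Account {

  @PartitionKey
  UUID id();

  // Checked on writes only; selects are unaffected, as the Javadoc notes.
  @Constraints.NotNull
  @Constraints.Email
  String email();

  @Constraints.LowerCase
  @Constraints.MaxLength(64) // threshold element name is an assumption
  String username();
}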

View file

@@ -3,20 +3,17 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * CoveringIndex annotation is used under the specific column or method in an entity interface
 * with the @Table annotation.
 *
 * <p>A corresponding materialized view will be created based on the underlying @Table for the
 * specific column.
 *
 * <p>This is useful when you need to perform IN or SORT/ORDER-BY queries; to do so you'll need a
 * different materialized table on disk in Cassandra.
 *
 * <p>For each @Table annotated interface Helenus will create/update/verify Cassandra Materialized
 * Views and some indexes if needed on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@@ -24,8 +21,7 @@ import java.lang.annotation.*;
public @interface CoveringIndex {

  /**
   * Defines the name of the index. By default it is the entity name with the column name as a
   * suffix.
   *
   * @return name of the covering index
   */

View file

@@ -18,22 +18,18 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * Index annotation is used under the specific column or method in an entity interface with
 * the @Table annotation.
 *
 * <p>The corresponding secondary index will be created in the underlying @Table for the specific
 * column.
 *
 * <p>Currently Cassandra supports only single-column indexes, so this index works only for a
 * single column.
 *
 * <p>Make sure that you are using low-cardinality columns for this index; that is a requirement of
 * Cassandra. Low-cardinality field examples: gender, country, age, status, etc. High-cardinality
 * field examples: id, email, timestamp, UUID, etc.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@@ -48,27 +44,22 @@ public @interface Index {

  String value() default "";

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;

  /**
   * Create a case-insensitive index using Cassandra 3.x+ support for SASI indexing.
   *
   * @return true if the index should ignore case when comparing
   */
  boolean caseSensitive() default true;

  /** @return */
  boolean distinct() default false;
}
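
As a hedged example (hypothetical Customer entity), a case-insensitive SASI secondary index on a low-cardinality column could be declared like this:

import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Customer {

  @PartitionKey
  UUID id();

  // Low-cardinality column, per the guidance above; ignore case when comparing.
  @Index(caseSensitive = false)
  String country();
}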

View file

@@ -20,9 +20,7 @@ import java.lang.annotation.*;

/**
 * Inherited Entity annotation
 *
 * <p>Inherited Table annotation is used to indicate that the methods should also be mapped.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)

View file

@@ -20,17 +20,13 @@ import java.lang.annotation.*;

/**
 * Materialized alternate view of another Entity annotation
 *
 * <p>MaterializedView annotation is used to define a different mapping to some other Table
 * interface.
 *
 * <p>This is useful when you need to perform IN or SORT/ORDER-BY queries; to do so you'll need a
 * different materialized table on disk in Cassandra.
 *
 * <p>For each @Table annotated interface Helenus will create/update/verify Cassandra Materialized
 * Views and some indexes if needed on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@@ -45,11 +41,10 @@ public @interface MaterializedView {

  String value() default "";

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */

View file

@@ -21,20 +21,16 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * PartitionKey annotation is used to mark a particular column as part of the partition key of the
 * table.
 *
 * <p>The partition key is the routing key. Cassandra uses it to find the primary data node in the
 * cluster that holds the data. Cassandra combines all parts of the partition key into a byte array
 * and then calculates a hash function using a good distribution algorithm (by default MurMur3). It
 * then uses the hash number as a token in the ring to find a virtual and then a physical data
 * server.
 *
 * <p>A @Table mapping entity is required to have at least one PartitionKey column. For @UDT
 * and @Tuple mapping entities the @PartitionKey annotation is not used.
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@@ -48,26 +44,23 @@ public @interface PartitionKey {

  String value() default "";

  /**
   * PartitionKey parts must be ordered in the @Table; this is a requirement of Cassandra. That is
   * how the partition key calculation works: column parts are joined in a defined order and the
   * final hash/token is calculated.
   *
   * <p>By default ordinal has the value 0, because in most cases a @Table has a single column
   * for @PartitionKey. If you have two or more parts in the PartitionKey, then you need to use
   * ordinal() to define the sequence of the parts.
   *
   * @return number used to sort columns in the PartitionKey
   */
  int ordinal() default 0;

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
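
A sketch of a composite partition key (hypothetical Measurement entity); both parts are hashed together into one token, so rows with the same (sensorId, day) pair land on the same node:

import java.util.Date;
import net.helenus.mapping.annotation.*;

@Table
public interface Measurement {

  @PartitionKey(ordinal = 0)
  String sensorId();

  // Second key part bounds partition size, e.g. one partition per sensor per day.
  @PartitionKey(ordinal = 1)
  int day();

  @ClusteringColumn
  Date at();

  double value();
}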

View file

@@ -23,14 +23,12 @@ import java.lang.annotation.Target;

/**
 * StaticColumn annotation is used to define a static column in a Cassandra Table.
 *
 * <p>It has no effect in @UDT and @Tuple types, nor in @Table-s that do not
 * have @ClusteringColumn-s.
 *
 * <p>When using @ClusteringColumn we may repeat some information that is unique for a row. For
 * this purpose we can define the @StaticColumn annotation, which creates a static column in the
 * table.
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@@ -51,11 +49,10 @@ public @interface StaticColumn {

  int ordinal() default 0;

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
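
For example (hypothetical Message entity), a per-partition title stored once instead of on every clustered row:

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Message {

  @PartitionKey
  UUID conversationId();

  @ClusteringColumn
  Date sentAt();

  // One shared value per partition, rather than repeating it on each row.
  @StaticColumn
  String conversationTitle();

  String body();
}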

View file

@@ -20,15 +20,12 @@ import java.lang.annotation.*;

/**
 * Entity annotation
 *
 * <p>Table annotation is used to define a Table mapping for an interface.
 *
 * <p>There are three types of Entity mapping annotations: @Table, @UDT, @Tuple.
 *
 * <p>For each @Table annotated interface Helenus will create/update/verify the Cassandra Table and
 * some indexes if needed on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@@ -43,11 +40,10 @@ public @interface Table {

  String value() default "";

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
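
A minimal sketch (hypothetical names) of overriding the derived table name:

import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table("user_profiles") // value() overrides the default name; forceQuote is only for reserved words
public interface UserProfile {

  @PartitionKey
  UUID id();

  String bio();
}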

View file

@@ -17,12 +17,8 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/** Transient annotation is used to mark properties that need not be mapped to the database. */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Transient {}
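
A small sketch (hypothetical Session entity; that default methods may carry @Transient is an assumption here):

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Session {

  @PartitionKey
  UUID id();

  Date expiresAt();

  // Computed at runtime; excluded from the schema and from writes.
  @Transient
  default boolean isExpired() {
    return expiresAt().before(new Date());
  }
}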

View file

@@ -20,19 +20,15 @@ import java.lang.annotation.*;

/**
 * Entity annotation
 *
 * <p>Tuple annotation is used to define a Tuple type mapping for an interface.
 *
 * <p>There are three types of Entity mapping annotations: @Table, @UDT, @Tuple.
 *
 * <p>Tuple is a fully embedded type; it is a sequence of underlying types, and the order of the
 * sub-types is important. Therefore all @Column-s must have ordinal(), and only the @Column
 * annotation is supported for the underlying types.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface Tuple {}
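
Sketch of a tuple mapping (hypothetical Coordinates type); per the Javadoc above, every part needs an explicit ordinal because the sequence defines the tuple:

import net.helenus.mapping.annotation.*;

@Tuple
public interface Coordinates {

  @Column(ordinal = 0)
  double latitude();

  @Column(ordinal = 1)
  double longitude();
}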

View file

@@ -15,79 +15,62 @@
 */
package net.helenus.mapping.annotation;

import com.datastax.driver.core.DataType;
import java.lang.annotation.*;

/**
 * Types annotations are used to clarify the Cassandra data type for a particular Java type.
 *
 * <p>Sometimes a single Java type can map to multiple Cassandra data types: - @String can
 * be @DataType.Name.ASCII or @DataType.Name.TEXT or @DataType.Name.VARCHAR - @Long can
 * be @DataType.Name.BIGINT or @DataType.Name.COUNTER
 *
 * <p>All these type annotations simplify the mapping between Java types and Cassandra data types.
 * They are not required; for each Java type there is a default Cassandra data type in Helenus, but
 * in some cases you may want to control the mapping to make sure that the right Cassandra data
 * type is used.
 *
 * <p>For complex types like collections, UDT and Tuple types, these annotations are used to
 * clarify the sub-type(s) or class/UDT names.
 *
 * <p>Has a significant effect on schema operations.
 */
public final class Types {

  private Types() {}

  /** Says to use the @DataType.Name.ASCII data type in the schema. Java type is @String. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Ascii {}

  /** Says to use the @DataType.Name.BIGINT data type in the schema. Java type is @Long. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Bigint {}

  /**
   * Says to use the @DataType.Name.BLOB data type in the schema. Java type is @ByteBuffer
   * or @byte[]. Used by default.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Blob {}

  /**
   * Says to use the @DataType.Name.LIST data type in the schema with a specific sub-type. Java
   * type is @List.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: prepend, prependAll, setIdx, append, appendAll,
   * discard and discardAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -95,11 +78,10 @@ public final class Types {
  public @interface List {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT sub-type in the list, consider the @UDTList annotation.
     *
     * @return data type name of the value
     */
@@ -107,22 +89,15 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.MAP data type in the schema with specific sub-types. Java type
   * is @Map.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -130,24 +105,22 @@ public final class Types {
  public @interface Map {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT key sub-type in the map, consider the @UDTKeyMap or @UDTMap
     * annotations.
     *
     * @return data type name of the key
     */
    DataType.Name key();

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT value sub-type in the map, consider the @UDTValueMap or @UDTMap
     * annotations.
     *
     * @return data type name of the value
     */
@@ -157,33 +130,24 @@ public final class Types {

  /**
   * Says to use the @DataType.Name.COUNTER type in the schema. Java type is @Long.
   *
   * <p>For this type there are special operations: increment and decrement in @UpdateOperation.
   * You do not need to initialize the counter value; it will be done automatically by Cassandra.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Counter {}

  /**
   * Says to use the @DataType.Name.SET data type in the schema with a specific sub-type. Java
   * type is @Set.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: add, addAll, remove and removeAll
   * in @UpdateOperation.
   */
  @Documented
@@ -192,11 +156,10 @@ public final class Types {
  public @interface Set {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT sub-type in the set, consider the @UDTSet annotation.
     *
     * @return data type name of the value
     */
@@ -204,12 +167,10 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.CUSTOM type in the schema. Java type is @ByteBuffer
   * or @byte[].
   *
   * <p>Used for custom user types that have a special implementation. Helenus does not deal with
   * this class directly for now; it is used only in serialized form.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -224,53 +185,39 @@ public final class Types {
    String className();
  }

  /** Says to use the @DataType.Name.TEXT type in the schema. Java type is @String. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Text {}

  /** Says to use the @DataType.Name.TIMESTAMP type in the schema. Java type is @Date. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Timestamp {}

  /** Says to use the @DataType.Name.TIMEUUID type in the schema. Java type is @UUID or @Date. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Timeuuid {}

  /**
   * Says to use the @DataType.Name.TUPLE type in the schema. Java type is @TupleValue or a model
   * interface with the @Tuple annotation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  public @interface Tuple {

    /**
     * If the Java type is @TupleValue then this field is required. Any Cassandra Tuple is a
     * sequence of Cassandra types. For now Helenus supports only simple data types in tuples for
     * the @TupleValue Java type.
     *
     * <p>In case the Java type is a model interface with the @Tuple annotation, all methods in
     * this interface can have Types annotations, which can be complex types as well.
     *
     * @return data type name sequence
     */
@@ -278,8 +225,8 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.UDT type in the schema. Java type is @UDTValue or a model
   * interface with the @UDT annotation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -287,17 +234,13 @@ public final class Types {
  public @interface UDT {

    /**
     * If the Java type is @UDTValue then this field is required. Any Cassandra UDT has a name and
     * must be created before this use as a Cassandra Type.
     *
     * <p>This value is the UDT name of the Cassandra Type that was already created in the schema.
     *
     * <p>In case the Java type is a model interface with the @UDT annotation, this field is not
     * used, since the model interface defines the UserDefinedType with a specific name.
     *
     * @return UDT name
     */
@@ -306,13 +249,10 @@ public final class Types {

    /**
     * Only used for the Java type @UDTValue.
     *
     * <p>In case the value() method returns a reserved word that can not be used as the name of
     * the UDT, forceQuote will add additional quotes around this name in all CQL queries.
     *
     * <p>Default value is false.
     *
     * @return true if quotation is needed
     */
@@ -320,22 +260,16 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.MAP data type in the schema with a specific UDT sub-type as
   * the key and a simple sub-type as the value. Java type is @Map.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -350,12 +284,10 @@ public final class Types {
    UDT key();

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT value sub-type in the map, consider the @UDTMap annotation.
     *
     * @return data type name of the value
     */
@@ -363,22 +295,17 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.LIST data type in the schema with a specific UDT sub-type.
   * Java type is @List.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: prepend, prependAll, setIdx, append, appendAll,
   * discard and discardAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -394,22 +321,16 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.MAP data type in the schema with specific UDT sub-types. Java
   * type is @Map.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -424,8 +345,7 @@ public final class Types {
    UDT key();

    /**
     * Clarification of using the UDT data type as the value sub-type in the collection.
     *
     * @return annotation of the UDT value
     */
@@ -433,21 +353,15 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.SET data type in the schema with a specific UDT sub-type. Java
   * type is @Set.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: add, addAll, remove and removeAll
   * in @UpdateOperation.
   */
  @Documented
@@ -464,22 +378,16 @@ public final class Types {
  }

  /**
   * Says to use the @DataType.Name.MAP data type in the schema with a specific simple sub-type as
   * the key and a UDT sub-type as the value. Java type is @Map.
   *
   * <p>Helenus does not allow using a specific implementation of the collection, so the result of
   * a data retrieval operation can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
@@ -487,39 +395,32 @@ public final class Types {
  public @interface UDTValueMap {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT key sub-type in the map, consider the @UDTMap annotation.
     *
     * @return data type name of the key
     */
    DataType.Name key();

    /**
     * Clarification of using the UDT data type as the value sub-type in the collection.
     *
     * @return annotation of the UDT value
     */
    UDT value();
  }

  /** Says to use the @DataType.Name.UUID type in the schema. Java type is @UUID. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Uuid {}

  /** Says to use the @DataType.Name.VARCHAR type in the schema. Java type is @String. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Varchar {}
}
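
A rough sketch combining a few of these clarifications (hypothetical Page entity; that @Types.Set carries its element type as the annotation value is an assumption, inferred from the "data type name of the value" Javadoc above):

import com.datastax.driver.core.DataType;
import java.util.Set;
import java.util.UUID;
import net.helenus.mapping.annotation.*;

@Table
public interface Page {

  @PartitionKey
  UUID id();

  // Force ASCII instead of the default TEXT for strings.
  @Types.Ascii
  String slug();

  // BIGINT would be the default for Long; ask for a counter column instead.
  @Types.Counter
  Long hits();

  // Clarify the element type of the set.
  @Types.Set(DataType.Name.TEXT)
  Set<String> tags();
}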

View file

@@ -39,11 +39,10 @@ public @interface UDT {

  String value() default "";

  /**
   * Reserved words in Cassandra need quotation in CQL queries. This property marks that the name
   * of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
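
Sketch of a user-defined type (hypothetical Address type) that other entities can embed:

import net.helenus.mapping.annotation.*;

@UDT("address") // explicit UDT name; forceQuote only if the name were a reserved word
public interface Address {

  @Column(ordinal = 0)
  String street();

  @Column(ordinal = 1)
  String city();
}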

View file

@@ -16,7 +16,6 @@
package net.helenus.mapping.convert;

import java.util.Map;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;

View file

@@ -15,9 +15,8 @@
 */
package net.helenus.mapping.convert;

import com.google.common.base.CaseFormat;
import java.util.function.Function;

public enum CamelCaseToUnderscoreConverter implements Function<String, String> {
  INSTANCE;
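
Assuming the enum simply delegates to Guava's CaseFormat (imported above), it can be applied as an ordinary Function; the expected output below is an inference from the class name, not taken from this changeset:

// Hypothetical usage of the converter as a java.util.function.Function.
String columnName = CamelCaseToUnderscoreConverter.INSTANCE.apply("firstName");
// Expected: "first_name" (CaseFormat.LOWER_CAMEL to CaseFormat.LOWER_UNDERSCORE).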

View file

@@ -18,7 +18,6 @@ package net.helenus.mapping.convert;
import java.util.Date;
import java.util.UUID;
import java.util.function.Function;

/** Simple Date to TimeUUID Converter */

Some files were not shown because too many files have changed in this diff.