Merge branch 'develop'

Greg Burd 2017-11-02 16:32:22 -04:00
commit 9eaa53c5f0
294 changed files with 15792 additions and 15114 deletions


@@ -3,7 +3,6 @@
 <component name="EclipseCodeFormatterProjectSettings">
   <option name="projectSpecificProfile">
     <ProjectSpecificProfile>
-      <option name="formatter" value="ECLIPSE" />
       <option name="pathToConfigFileJava" value="$PROJECT_DIR$/../newton/formatting/onshape-eclipse-general-preferences.epf" />
     </ProjectSpecificProfile>
   </option>

NOTES

@@ -1,172 +1,27 @@
Operation/
|-- AbstractStatementOperation
| |-- AbstractOperation
| | |-- AbstractFilterOperation
| | | |-- CountOperation
| | | |-- DeleteOperation
| | | `-- UpdateOperation
| | |-- BoundOperation
| | `-- InsertOperation
| |-- AbstractOptionalOperation
| | |-- AbstractFilterOptionalOperation
| | | |-- SelectFirstOperation
| | | `-- SelectFirstTransformingOperation
| | `-- BoundOptionalOperation
| `-- AbstractStreamOperation
| |-- AbstractFilterStreamOperation
| | |-- SelectOperation
| | `-- SelectTransformingOperation
| `-- BoundStreamOperation
|-- PreparedOperation
|-- PreparedOptionalOperation
`-- PreparedStreamOperation
--- Cache
// `E` is the type of the Entity class or one of:
// - ResultSet
// - ArrayTuple{N}
// - Count
// `F` is the type argument passed to us from the HelenusSession DSL and carried along via one of
// the Operation classes; it is going to be one of:
// - ResultSet
// - ArrayTuple{N}
// - or a type previously registered as a HelenusEntity.
// In the form of a:
// - Stream<?> or an
// - Optional<?>
//
// Operation/
// |-- AbstractStatementOperation
// | |-- AbstractOperation
// | | |-- AbstractFilterOperation
// | | | |-- CountOperation
// | | | |-- DeleteOperation
// | | | `-- UpdateOperation
// | | |-- BoundOperation
// | | `-- InsertOperation
// | |-- AbstractOptionalOperation
// | | |-- AbstractFilterOptionalOperation
// | | | |-- SelectFirstOperation
// | | | `-- SelectFirstTransformingOperation
// | | `-- BoundOptionalOperation
// | `-- AbstractStreamOperation
// | |-- AbstractFilterStreamOperation
// | | |-- SelectOperation
// | | `-- SelectTransformingOperation
// | `-- BoundStreamOperation
// |-- PreparedOperation
// |-- PreparedOptionalOperation
// `-- PreparedStreamOperation
//
// These all boil down to: Select, Update, Insert, Delete and Count
//
// -- Select:
// 1) Select statements that contain all primary key information will be "distinct" and
// result in a single value or no match.
// If present, return the cached entity, otherwise execute the query and cache the result.
//
// 2) Otherwise the result is a set, possibly empty, of values that match.
// When within a UOW:
// If present, return the cached value(s) from the statement cache matching the query string.
// Otherwise, execute the query, cache the result in the statement cache, and update/merge the
// entities into the entity cache.
// NOTE: When we read data from the database we augment the select clause with TTL and write
// timestamps for all columns that record such information so as to be able to properly expire
// and merge values in the cache.
//
// -- Update:
// Execute the database statement and then, iff successful, upsert the entity being updated into
// the entity cache.
//
// -- Insert/Upsert:
// Same as Update.
//
// -- Delete:
// Same as Update, only remove the cached value from all caches on success.
//
// -- Count:
// If operating within a UOW, look up the count in the statement cache; if not present, execute
// the query and cache the result.
//
if (delegate instanceof SelectOperation) {
  SelectOperation<E> op = (SelectOperation<E>) delegate;

  // Determine if we are caching and if so where.
  AbstractCache<CacheKey, Set<E>> cache = delegate.getCache();
  boolean prepareStatementForCaching = cache != null;
  if (uow != null) {
    prepareStatementForCaching = true;
    cache = uow.<Set<E>>getCacheEnclosing(cache);
  }

  // The delegate will provide the cache key because it will either be:
  // a) when distinct: the combination of the partition/cluster key columns
  // b) otherwise: the table name followed by the portion of the SQL statement that would form the WHERE clause
  CacheKey key = (cache == null) ? null : delegate.getCacheKey();
  if (key != null && cache != null) {
    Set<E> value = cache.get(key);
    if (value != null) {
      // Select will always return a Stream<E>
      // TODO(gburd): SelectTransforming... apply fn here?
      result = (E) value.stream();
      if (cacheHitCounter != null) {
        cacheHitCounter.inc();
      }
      if (log != null) {
        log.info("cache hit");
      }
      return result;
    } else {
      if (cacheMissCounter != null) {
        cacheMissCounter.inc();
      }
      if (log != null) {
        log.info("cache miss");
      }
    }
  }
}
if (cache != null) {
  Object obj = delegate.unwrap(result);
  if (obj != null) {
    cache.put(key, obj);
  }
  delegate.<E>extract(result, key, cache);
}
}
}
// TODO: first, ask the delegate for the cacheKey
// if this is a SELECT query:
// if not in cache build the statement, execute the future, cache the result, transform the result then cache the transformations
// if INSERT/UPSERT/UPDATE
// if DELETE
// if COUNT
----------------------------
@Override
public CacheKey getCacheKey() {

  List<String> keys = new ArrayList<>(filters.size());
  HelenusEntity entity = props.get(0).getEntity();

  for (HelenusPropertyNode prop : props) {
    switch (prop.getProperty().getColumnType()) {
      case PARTITION_KEY:
      case CLUSTERING_COLUMN:
        Filter filter = filters.get(prop.getProperty());
        if (filter != null) {
          keys.add(filter.toString());
        } else {
          // we're missing a part of the primary key, so we can't create a proper cache key
          return null;
        }
        break;
      default:
        // We've passed the primary key components in this ordered list, so we're done building
        // the cache key.
        if (keys.size() > 0) {
          return new CacheKey(entity, Joiner.on(",").join(keys));
        }
        return null;
    }
  }
  return null;
}
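
A hedged illustration of the key produced above when every primary-key component has a filter; the entity and filter strings are hypothetical, only the constructor call mirrors the code above:

  // Hypothetical values: both primary-key columns of widget(id, region) are filtered,
  // so the key is the entity plus the joined filter strings. If any component were
  // missing, getCacheKey() would return null and the caller would fall back to the
  // statement cache keyed on the table name plus the WHERE-clause text.
  CacheKey key = new CacheKey(widgetEntity, Joiner.on(",").join(Arrays.asList("id == 42", "region == 'us-east'")));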
---------------------------
// TODO(gburd): create a statement that matches one that wasn't prepared
//String key =
//  "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString();
@@ -175,64 +30,6 @@
//}
------------------------
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public abstract class AbstractCache<K, V> {

  final Logger logger = LoggerFactory.getLogger(getClass());
  public Cache<K, V> cache;

  public AbstractCache() {
    RemovalListener<K, V> listener =
        new RemovalListener<K, V>() {
          @Override
          public void onRemoval(RemovalNotification<K, V> n) {
            if (n.wasEvicted()) {
              String cause = n.getCause().name();
              logger.info(cause);
            }
          }
        };

    cache =
        CacheBuilder.newBuilder()
            .maximumSize(10_000)
            .expireAfterAccess(20, TimeUnit.MINUTES)
            .weakKeys()
            .softValues()
            .removalListener(listener)
            .build();
  }

  V get(K key) {
    return cache.getIfPresent(key);
  }

  void put(K key, V value) {
    cache.put(key, value);
  }
}
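
A minimal sketch of how a concrete cache might be derived from the base class above; the subclass name and type parameters are assumptions for illustration, not part of this commit:

  // Hypothetical subclass: caches SELECT results keyed by the CacheKey described earlier.
  public class SelectStatementCache extends AbstractCache<CacheKey, ResultSet> {
    // Inherits get/put and the eviction-logging removal listener; a caller would
    // typically consult it before executing a statement, e.g.:
    //   ResultSet rs = statementCache.get(key);
    //   if (rs == null) { rs = session.execute(stmt); statementCache.put(key, rs); }
  }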
------------------------------------------------------------------------------------------------
cache entities (2 methods) marked @Cacheable
cache entities in txn context
cache results when .cache() is chained before the .{a}sync() call, returning an EvictableCacheItem<E> that has an .evict() method (see the sketch below)
fix txn .andThen() chains
primitive types have default values (e.g. boolean, int, ...) but primitive wrapper classes do not and can be null (e.g. Boolean, Integer, ...)
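
A hedged sketch of the EvictableCacheItem wrapper mentioned in the .cache() note above; only the name and the .evict() method come from the note, the rest is assumption:

  // Hypothetical shape for the value returned by a .cache()...{a}sync() chain.
  public interface EvictableCacheItem<E> {
    E get();      // the cached result of the operation
    void evict(); // drop this entry from the session/UOW caches
  }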
@@ -372,3 +169,17 @@ begin:
cache.put
}
*/
------------------
InsertOperation

Class<?> iface = entity.getMappingInterface();
boolean includesNonIdentityValues =
    values
        .stream()
        .map(
            t -> {
              ColumnType type = t._1.getProperty().getColumnType();
              return !((type == ColumnType.PARTITION_KEY)
                  || (type == ColumnType.CLUSTERING_COLUMN));
            })
        .reduce(false, (acc, t) -> acc || t);

if (resultType == iface) {
  if (values.size() > 0 && includesNonIdentityValues) {
    boolean immutable = iface.isAssignableFrom(Drafted.class);


@@ -5,19 +5,19 @@ import java.util.List;
public class DefaultMetadata extends Metadata {

  public DefaultMetadata() {
    super(null);
  }

  private DefaultMetadata(Cluster.Manager cluster) {
    super(cluster);
  }

  public TupleType newTupleType(DataType... types) {
    return newTupleType(Arrays.asList(types));
  }

  public TupleType newTupleType(List<DataType> types) {
    return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
  }
}


@@ -15,35 +15,34 @@
 */
package com.datastax.driver.core.querybuilder;

import com.datastax.driver.core.CodecRegistry;
import java.util.List;

public class IsNotNullClause extends Clause {

  final String name;

  public IsNotNullClause(String name) {
    this.name = name;
  }

  @Override
  String name() {
    return name;
  }

  @Override
  Object firstValue() {
    return null;
  }

  @Override
  void appendTo(StringBuilder sb, List<Object> variables, CodecRegistry codecRegistry) {
    Utils.appendName(name, sb).append(" IS NOT NULL");
  }

  @Override
  boolean containsBindMarker() {
    return false;
  }
}


@@ -6,143 +6,150 @@ import com.google.common.base.Optional;
public class CreateCustomIndex extends CreateIndex {

  private String indexName;
  private boolean ifNotExists = false;
  private Optional<String> keyspaceName = Optional.absent();
  private String tableName;
  private String columnName;
  private boolean keys;

  CreateCustomIndex(String indexName) {
    super(indexName);
    validateNotEmpty(indexName, "Index name");
    validateNotKeyWord(
        indexName,
        String.format(
            "The index name '%s' is not allowed because it is a reserved keyword", indexName));
    this.indexName = indexName;
  }

  /**
   * Add the 'IF NOT EXISTS' condition to this CREATE INDEX statement.
   *
   * @return this CREATE INDEX statement.
   */
  public CreateIndex ifNotExists() {
    this.ifNotExists = true;
    return this;
  }

  /**
   * Specify the keyspace and table to create the index on.
   *
   * @param keyspaceName the keyspace name.
   * @param tableName the table name.
   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
   */
  public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
    validateNotEmpty(keyspaceName, "Keyspace name");
    validateNotEmpty(tableName, "Table name");
    validateNotKeyWord(
        keyspaceName,
        String.format(
            "The keyspace name '%s' is not allowed because it is a reserved keyword",
            keyspaceName));
    validateNotKeyWord(
        tableName,
        String.format(
            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
    this.keyspaceName = Optional.fromNullable(keyspaceName);
    this.tableName = tableName;
    return new CreateCustomIndex.CreateIndexOn();
  }

  /**
   * Specify the table to create the index on.
   *
   * @param tableName the table name.
   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
   */
  public CreateIndex.CreateIndexOn onTable(String tableName) {
    validateNotEmpty(tableName, "Table name");
    validateNotKeyWord(
        tableName,
        String.format(
            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
    this.tableName = tableName;
    return new CreateCustomIndex.CreateIndexOn();
  }

  String getCustomClassName() {
    return "";
  }

  String getOptions() {
    return "";
  }

  @Override
  public String buildInternal() {
    StringBuilder createStatement =
        new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");

    if (ifNotExists) {
      createStatement.append("IF NOT EXISTS ");
    }

    createStatement.append(indexName).append(" ON ");

    if (keyspaceName.isPresent()) {
      createStatement.append(keyspaceName.get()).append(".");
    }
    createStatement.append(tableName);

    createStatement.append("(");
    if (keys) {
      createStatement.append("KEYS(");
    }
    createStatement.append(columnName);
    if (keys) {
      createStatement.append(")");
    }
    createStatement.append(")");

    createStatement.append(" USING '");
    createStatement.append(getCustomClassName());
    createStatement.append("' WITH OPTIONS = {");
    createStatement.append(getOptions());
    createStatement.append(" }");

    return createStatement.toString();
  }

  public class CreateIndexOn extends CreateIndex.CreateIndexOn {
    /**
     * Specify the column to create the index on.
     *
     * @param columnName the column name.
     * @return the final CREATE INDEX statement.
     */
    public SchemaStatement andColumn(String columnName) {
      validateNotEmpty(columnName, "Column name");
      validateNotKeyWord(
          columnName,
          String.format(
              "The column name '%s' is not allowed because it is a reserved keyword", columnName));
      CreateCustomIndex.this.columnName = columnName;
      return SchemaStatement.fromQueryString(buildInternal());
    }

    /**
     * Create an index on the keys of the given map column.
     *
     * @param columnName the column name.
     * @return the final CREATE INDEX statement.
     */
    public SchemaStatement andKeysOfColumn(String columnName) {
      validateNotEmpty(columnName, "Column name");
      validateNotKeyWord(
          columnName,
          String.format(
              "The column name '%s' is not allowed because it is a reserved keyword", columnName));
      CreateCustomIndex.this.columnName = columnName;
      CreateCustomIndex.this.keys = true;
      return SchemaStatement.fromQueryString(buildInternal());
    }
  }
}


@@ -5,48 +5,53 @@ import com.datastax.driver.core.querybuilder.Select;
public class CreateMaterializedView extends Create {

  private String viewName;
  private Select.Where selection;
  private String primaryKey;
  private String clustering;

  public CreateMaterializedView(
      String keyspaceName,
      String viewName,
      Select.Where selection,
      String primaryKey,
      String clustering) {
    super(keyspaceName, viewName);
    this.viewName = viewName;
    this.selection = selection;
    this.primaryKey = primaryKey;
    this.clustering = clustering;
  }

  public String getQueryString(CodecRegistry codecRegistry) {
    return buildInternal();
  }

  public String buildInternal() {
    StringBuilder createStatement =
        new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
    if (ifNotExists) {
      createStatement.append(" IF NOT EXISTS");
    }
    createStatement.append(" ");
    if (keyspaceName.isPresent()) {
      createStatement.append(keyspaceName.get()).append(".");
    }
    createStatement.append(viewName);
    createStatement.append(" AS ");
    createStatement.append(selection.getQueryString());
    createStatement.setLength(createStatement.length() - 1);
    createStatement.append(" ");
    createStatement.append(primaryKey);
    if (clustering != null) {
      createStatement.append(" ").append(clustering);
    }
    createStatement.append(";");

    return createStatement.toString();
  }

  public String toString() {
    return buildInternal();
  }
}


@@ -2,16 +2,17 @@ package com.datastax.driver.core.schemabuilder;
public class CreateSasiIndex extends CreateCustomIndex {

  public CreateSasiIndex(String indexName) {
    super(indexName);
  }

  String getCustomClassName() {
    return "org.apache.cassandra.index.sasi.SASIIndex";
  }

  String getOptions() {
    return "'analyzer_class': "
        + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
        + "'case_sensitive': 'false'";
  }
}


@@ -20,19 +20,19 @@ import com.datastax.driver.core.CodecRegistry;
/** A built CREATE TABLE statement. */
public class CreateTable extends Create {

  public CreateTable(String keyspaceName, String tableName) {
    super(keyspaceName, tableName);
  }

  public CreateTable(String tableName) {
    super(tableName);
  }

  public String getQueryString(CodecRegistry codecRegistry) {
    return buildInternal();
  }

  public String toString() {
    return buildInternal();
  }
}


@@ -4,46 +4,49 @@ import com.google.common.base.Optional;
public class DropMaterializedView extends Drop {

  private Optional<String> keyspaceName = Optional.absent();
  private String itemName;
  private boolean ifExists = true;

  public DropMaterializedView(String keyspaceName, String viewName) {
    this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
  }

  private DropMaterializedView(String keyspaceName, String viewName, DroppedItem itemType) {
    super(keyspaceName, viewName, Drop.DroppedItem.TABLE);
    validateNotEmpty(keyspaceName, "Keyspace name");
    this.keyspaceName = Optional.fromNullable(keyspaceName);
    this.itemName = viewName;
  }

  /**
   * Add the 'IF EXISTS' condition to this DROP statement.
   *
   * @return this statement.
   */
  public Drop ifExists() {
    this.ifExists = true;
    return this;
  }

  @Override
  public String buildInternal() {
    StringBuilder dropStatement = new StringBuilder("DROP MATERIALIZED VIEW ");
    if (ifExists) {
      dropStatement.append("IF EXISTS ");
    }
    if (keyspaceName.isPresent()) {
      dropStatement.append(keyspaceName.get()).append(".");
    }

    dropStatement.append(itemName);
    return dropStatement.toString();
  }

  enum DroppedItem {
    TABLE,
    TYPE,
    INDEX,
    MATERIALIZED_VIEW
  }
}


@@ -17,7 +17,6 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
import net.helenus.core.reflect.ReflectionDslInstantiator;
@@ -26,23 +25,23 @@ import net.helenus.mapping.convert.CamelCaseToUnderscoreConverter;
public class DefaultHelenusSettings implements HelenusSettings {

  @Override
  public Function<String, String> getPropertyToColumnConverter() {
    return CamelCaseToUnderscoreConverter.INSTANCE;
  }

  @Override
  public Function<Method, Boolean> getGetterMethodDetector() {
    return GetterMethodDetector.INSTANCE;
  }

  @Override
  public DslInstantiator getDslInstantiator() {
    return ReflectionDslInstantiator.INSTANCE;
  }

  @Override
  public MapperInstantiator getMapperInstantiator() {
    return ReflectionMapperInstantiator.INSTANCE;
  }
}


@@ -18,32 +18,31 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.function.Function;
import net.helenus.mapping.annotation.Transient;

public enum GetterMethodDetector implements Function<Method, Boolean> {
  INSTANCE;

  @Override
  public Boolean apply(Method method) {

    if (method == null) {
      throw new IllegalArgumentException("empty parameter");
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      return false;
    }

    if (Modifier.isStatic(method.getModifiers())) {
      return false;
    }

    // Methods marked "Transient" are not mapped, skip them.
    if (method.getDeclaredAnnotation(Transient.class) != null) {
      return false;
    }

    return true;
  }
}


@@ -17,17 +17,16 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;

public interface HelenusSettings {

  Function<String, String> getPropertyToColumnConverter();

  Function<Method, Boolean> getGetterMethodDetector();

  DslInstantiator getDslInstantiator();

  MapperInstantiator getMapperInstantiator();
}


@@ -3,37 +3,36 @@ package net.helenus.core;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import net.helenus.core.reflect.MapExportable;

public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {

  public AbstractAuditedEntityDraft(MapExportable entity) {
    super(entity);

    Date in = new Date();
    LocalDateTime ldt = LocalDateTime.ofInstant(in.toInstant(), ZoneId.systemDefault());
    Date now = Date.from(ldt.atZone(ZoneId.systemDefault()).toInstant());

    String who = getCurrentAuditor();

    if (entity == null) {
      if (who != null) {
        set("createdBy", who);
      }
      set("createdAt", now);
    }
    if (who != null) {
      set("modifiedBy", who);
    }
    set("modifiedAt", now);
  }

  protected String getCurrentAuditor() {
    return null;
  }

  public Date createdAt() {
    return (Date) get("createdAt", Date.class);
  }
}


@@ -1,174 +1,171 @@
package net.helenus.core;

import com.google.common.primitives.Primitives;
import java.io.Serializable;
import java.util.*;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.MappingUtil;
import org.apache.commons.lang3.SerializationUtils;

public abstract class AbstractEntityDraft<E> implements Drafted<E> {

  private final Map<String, Object> backingMap = new HashMap<String, Object>();
  private final MapExportable entity;
  private final Map<String, Object> entityMap;

  public AbstractEntityDraft(MapExportable entity) {
    this.entity = entity;
    this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>();
  }

  public abstract Class<E> getEntityClass();

  public E build() {
    return Helenus.map(getEntityClass(), toMap());
  }

  @SuppressWarnings("unchecked")
  public <T> T get(Getter<T> getter, Class<?> returnType) {
    return (T) get(this.<T>methodNameFor(getter), returnType);
  }

  @SuppressWarnings("unchecked")
  public <T> T get(String key, Class<?> returnType) {
    T value = (T) backingMap.get(key);

    if (value == null) {
      value = (T) entityMap.get(key);
      if (value == null) {

        if (Primitives.allPrimitiveTypes().contains(returnType)) {

          DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
          if (type == null) {
            throw new RuntimeException("unknown primitive type " + returnType);
          }

          return (T) type.getDefaultValue();
        }
      } else {
        // Collections fetched from the entityMap
        if (value instanceof Collection) {
          try {
            value = MappingUtil.<T>clone(value);
          } catch (CloneNotSupportedException e) {
            // TODO(gburd): deep?shallow? copy of List, Map, Set to a mutable collection.
            value = (T) SerializationUtils.<Serializable>clone((Serializable) value);
          }
        }
      }
    }

    return value;
  }

  public <T> Object set(Getter<T> getter, Object value) {
    return set(this.<T>methodNameFor(getter), value);
  }

  public Object set(String key, Object value) {
    if (key == null || value == null) {
      return null;
    }
    backingMap.put(key, value);
    return value;
  }

  @SuppressWarnings("unchecked")
  public <T> T mutate(Getter<T> getter, T value) {
    return (T) mutate(this.<T>methodNameFor(getter), value);
  }

  public Object mutate(String key, Object value) {
    Objects.requireNonNull(key);

    if (value == null) {
      return null;
    }

    if (entity != null) {
      Map<String, Object> map = entity.toMap();

      if (map.containsKey(key) && !value.equals(map.get(key))) {
        backingMap.put(key, value);
        return value;
      }

      return map.get(key);
    } else {
      backingMap.put(key, value);
      return null;
    }
  }

  private <T> String methodNameFor(Getter<T> getter) {
    return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();
  }

  public <T> Object unset(Getter<T> getter) {
    return unset(methodNameFor(getter));
  }

  public Object unset(String key) {
    if (key != null) {
      Object value = backingMap.get(key);
      backingMap.put(key, null);
      return value;
    }
    return null;
  }

  public <T> boolean reset(Getter<T> getter, T desiredValue) {
    return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
  }

  public <T> boolean reset(String key, T desiredValue) {
    if (key != null && desiredValue != null) {
      @SuppressWarnings("unchecked")
      T currentValue = (T) backingMap.get(key);
      if (currentValue == null || !currentValue.equals(desiredValue)) {
        set(key, desiredValue);
        return true;
      }
    }
    return false;
  }

  @Override
  public Map<String, Object> toMap() {
    return toMap(entityMap);
  }

  public Map<String, Object> toMap(Map<String, Object> entityMap) {
    Map<String, Object> combined;
    if (entityMap != null && entityMap.size() > 0) {
      combined = new HashMap<String, Object>(entityMap.size());
      for (Map.Entry<String, Object> e : entityMap.entrySet()) {
        combined.put(e.getKey(), e.getValue());
      }
    } else {
      combined = new HashMap<String, Object>(backingMap.size());
    }
    for (String key : mutated()) {
      combined.put(key, backingMap.get(key));
    }
    return combined;
  }

  @Override
  public Set<String> mutated() {
    return backingMap.keySet();
  }

  @Override
  public String toString() {
    return backingMap.toString();
  }
}


@@ -15,143 +15,139 @@
 */
package net.helenus.core;

import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Table;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.Executor;
import net.helenus.core.cache.Facet;
import net.helenus.core.operation.Operation;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.Either;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class AbstractSessionOperations {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractSessionOperations.class);

  public abstract Session currentSession();

  public abstract String usingKeyspace();

  public abstract boolean isShowCql();

  public abstract PrintStream getPrintStream();

  public abstract Executor getExecutor();

  public abstract SessionRepository getSessionRepository();

  public abstract ColumnValueProvider getValueProvider();

  public abstract ColumnValuePreparer getValuePreparer();

  public abstract ConsistencyLevel getDefaultConsistencyLevel();

  public abstract boolean getDefaultQueryIdempotency();

  public PreparedStatement prepare(RegularStatement statement) {
    try {
      logStatement(statement, false);
      return currentSession().prepare(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    try {
      logStatement(statement, false);
      return currentSession().prepareAsync(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  public ResultSet execute(Statement statement, boolean showValues) {
    return execute(statement, null, null, showValues);
  }

  public ResultSet execute(Statement statement, Stopwatch timer, boolean showValues) {
    return execute(statement, null, timer, showValues);
  }

  public ResultSet execute(Statement statement, UnitOfWork uow, boolean showValues) {
    return execute(statement, uow, null, showValues);
  }

  public ResultSet execute(
      Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
    return executeAsync(statement, uow, timer, showValues).getUninterruptibly();
  }

  public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
    return executeAsync(statement, null, null, showValues);
  }

  public ResultSetFuture executeAsync(Statement statement, Stopwatch timer, boolean showValues) {
    return executeAsync(statement, null, timer, showValues);
  }

  public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, boolean showValues) {
    return executeAsync(statement, uow, null, showValues);
  }

  public ResultSetFuture executeAsync(
      Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
    try {
      logStatement(statement, showValues);
      return currentSession().executeAsync(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  private void logStatement(Statement statement, boolean showValues) {
    if (isShowCql()) {
      printCql(Operation.queryString(statement, showValues));
    } else if (LOG.isDebugEnabled()) {
      LOG.info("CQL> " + Operation.queryString(statement, showValues));
    }
  }

  public Tracer getZipkinTracer() {
    return null;
  }

  public MetricRegistry getMetricRegistry() {
    return null;
  }

  public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {}

  RuntimeException translateException(RuntimeException e) {
    if (e instanceof HelenusException) {
      return e;
    }
    throw new HelenusException(e);
  }

  public Object checkCache(String tableName, List<Facet> facets) {
    return null;
  }

  public void updateCache(Object pojo, List<Facet> facets) {}

  void printCql(String cql) {
    getPrintStream().println(cql);
  }

  public void cacheEvict(List<Facet> facets) {}
}


@ -17,349 +17,391 @@ package net.helenus.core;
import static net.helenus.core.HelenusSession.deleted; import static net.helenus.core.HelenusSession.deleted;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.diffplug.common.base.Errors; import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable; import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table; import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser; import com.google.common.collect.TreeTraverser;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.support.Either; import net.helenus.support.Either;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Encapsulates the concept of a "transaction" as a unit-of-work. */ /** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable { public abstract class AbstractUnitOfWork<E extends Exception>
implements UnitOfWork<E>, AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class); private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);
private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>(); private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
private final HelenusSession session; private final HelenusSession session;
private final AbstractUnitOfWork<E> parent; private final AbstractUnitOfWork<E> parent;
private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create(); private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
protected String purpose; protected String purpose;
protected List<String> nestedPurposes = new ArrayList<String>(); protected List<String> nestedPurposes = new ArrayList<String>();
protected int cacheHits = 0; protected String info;
protected int cacheMisses = 0; protected int cacheHits = 0;
protected int databaseLookups = 0; protected int cacheMisses = 0;
protected Stopwatch elapsedTime; protected int databaseLookups = 0;
protected Map<String, Double> databaseTime = new HashMap<>(); protected Stopwatch elapsedTime;
protected double cacheLookupTime = 0.0; protected Map<String, Double> databaseTime = new HashMap<>();
private List<CommitThunk> postCommit = new ArrayList<CommitThunk>(); protected double cacheLookupTime = 0.0;
private boolean aborted = false; private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
private boolean committed = false; private boolean aborted = false;
private boolean committed = false;
protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) { protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
Objects.requireNonNull(session, "containing session cannot be null"); Objects.requireNonNull(session, "containing session cannot be null");
this.session = session; this.session = session;
this.parent = parent; this.parent = parent;
} }
@Override @Override
public void addDatabaseTime(String name, Stopwatch amount) { public void addDatabaseTime(String name, Stopwatch amount) {
Double time = databaseTime.get(name); Double time = databaseTime.get(name);
if (time == null) { if (time == null) {
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS)); databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
} else { } else {
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS)); databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
} }
} }
@Override @Override
public void addCacheLookupTime(Stopwatch amount) { public void addCacheLookupTime(Stopwatch amount) {
cacheLookupTime += amount.elapsed(TimeUnit.MICROSECONDS); cacheLookupTime += amount.elapsed(TimeUnit.MICROSECONDS);
} }
@Override @Override
public void addNestedUnitOfWork(UnitOfWork<E> uow) { public void addNestedUnitOfWork(UnitOfWork<E> uow) {
synchronized (nested) { synchronized (nested) {
nested.add((AbstractUnitOfWork<E>) uow); nested.add((AbstractUnitOfWork<E>) uow);
} }
} }
@Override @Override
public synchronized UnitOfWork<E> begin() { public synchronized UnitOfWork<E> begin() {
if (LOG.isInfoEnabled()) { if (LOG.isInfoEnabled()) {
elapsedTime = Stopwatch.createStarted(); elapsedTime = Stopwatch.createStarted();
} }
// log.record(txn::start) // log.record(txn::start)
return this; return this;
} }
@Override @Override
public String getPurpose() { public String getPurpose() {
return purpose; return purpose;
} }
@Override @Override
public UnitOfWork setPurpose(String purpose) { public UnitOfWork setPurpose(String purpose) {
this.purpose = purpose; this.purpose = purpose;
return this; return this;
} }
@Override @Override
public void recordCacheAndDatabaseOperationCount(int cache, int ops) { public void setInfo(String info) {
if (cache > 0) { this.info = info;
cacheHits += cache; }
} else {
cacheMisses += Math.abs(cache);
}
if (ops > 0) {
databaseLookups += ops;
}
}
public String logTimers(String what) { @Override
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0; public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
double d = 0.0; if (cache > 0) {
double c = cacheLookupTime / 1000.0; cacheHits += cache;
double fc = (c / e) * 100.0; } else {
String database = ""; cacheMisses += Math.abs(cache);
if (databaseTime.size() > 0) { }
List<String> dbt = new ArrayList<>(databaseTime.size()); if (ops > 0) {
for (String name : databaseTime.keySet()) { databaseLookups += ops;
double t = databaseTime.get(name) / 1000.0; }
d += t; }
dbt.add(String.format("%s took %,.3fms %,2.2f%%", name, t, (t / e) * 100.0));
}
double fd = (d / e) * 100.0;
database = String.format(", %d quer%s (%,.3fms %,2.2f%% - %s)", databaseLookups,
(databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
}
String cache = "";
if (cacheLookupTime > 0) {
int cacheLookups = cacheHits + cacheMisses;
cache = String.format(" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)", cacheLookups,
cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
}
String da = "";
if (databaseTime.size() > 0 || cacheLookupTime > 0) {
double dat = d + c;
double daf = (dat / e) * 100;
da = String.format(" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
}
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
String n = nested.stream().map(uow -> String.valueOf(uow.hashCode())).collect(Collectors.joining(", "));
String s = String.format(Locale.US, "UOW(%s%s) %s in %,.3fms%s%s%s%s%s", hashCode(),
(nested.size() > 0 ? ", [" + n + "]" : ""), what, e, cache, database, da,
(purpose == null ? "" : " " + purpose), (nestedPurposes.isEmpty()) ? "" : ", " + x);
return s;
}
private void applyPostCommitFunctions() { public String logTimers(String what) {
if (!postCommit.isEmpty()) { double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
for (CommitThunk f : postCommit) { double d = 0.0;
f.apply(); double c = cacheLookupTime / 1000.0;
} double fc = (c / e) * 100.0;
} String database = "";
if (LOG.isInfoEnabled()) { if (databaseTime.size() > 0) {
LOG.info(logTimers("committed")); List<String> dbt = new ArrayList<>(databaseTime.size());
} for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
} double t = dt.getValue() / 1000.0;
d += t;
dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
}
double fd = (d / e) * 100.0;
database =
String.format(
", %d quer%s (%,.3fms %,2.2f%% - %s)",
databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
}
String cache = "";
if (cacheLookupTime > 0) {
int cacheLookups = cacheHits + cacheMisses;
cache =
String.format(
" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
}
String da = "";
if (databaseTime.size() > 0 || cacheLookupTime > 0) {
double dat = d + c;
double daf = (dat / e) * 100;
da =
String.format(
" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
}
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
String n =
nested
.stream()
.map(uow -> String.valueOf(uow.hashCode()))
.collect(Collectors.joining(", "));
String s =
String.format(
Locale.US,
"UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
hashCode(),
(nested.size() > 0 ? ", [" + n + "]" : ""),
what,
e,
cache,
database,
da,
(purpose == null ? "" : " " + purpose),
(nestedPurposes.isEmpty()) ? "" : ", " + x,
(info == null) ? "" : " " + info);
return s;
}
private void applyPostCommitFunctions() {
if (!postCommit.isEmpty()) {
for (CommitThunk f : postCommit) {
f.apply();
}
}
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("committed"));
}
}
@Override
public Optional<Object> cacheLookup(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
Optional<Object> result = Optional.empty();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
if (eitherValue != null) {
Object value = deleted;
if (eitherValue.isLeft()) {
value = eitherValue.getLeft();
}
result = Optional.of(value);
break;
}
}
}
if (!result.isPresent()) {
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
if (parent != null) {
return parent.cacheLookup(facets);
}
}
return result;
}
@Override
public List<Facet> cacheEvict(List<Facet> facets) {
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
String tableName = CacheUtil.schemaName(facets);
Optional<Object> optionalValue = cacheLookup(facets);
if (optionalValue.isPresent()) {
Object value = optionalValue.get();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnKey = facet.name() + "==" + facet.value();
// mark the value identified by the facet as `deleted`
cache.put(tableName, columnKey, deletedObjectFacets);
}
}
// look for other row/col pairs that referenced the same object and mark them
// `deleted` as well
cache
.columnKeySet()
.forEach(
columnKey -> {
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
if (eitherCachedValue.isLeft()) {
Object cachedValue = eitherCachedValue.getLeft();
if (cachedValue == value) {
cache.put(tableName, columnKey, deletedObjectFacets);
String[] parts = columnKey.split("==");
facets.add(new Facet<String>(parts[0], parts[1]));
}
}
});
}
return facets;
}
@Override
public void cacheUpdate(Object value, List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
cache.put(tableName, columnName, Either.left(value));
}
}
}
private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
return nested.iterator();
}
/**
* Checks to see if the work performed between calling begin and now can be committed or not.
*
* @return a function from which to chain work that only happens when commit is successful
* @throws E when the work overlaps with other concurrent writers.
*/
public PostCommitFunction<Void, Void> commit() throws E {
// All nested UnitOfWork should be committed (not aborted) before calls to
// commit, check.
boolean canCommit = true;
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
if (this != uow) {
canCommit &= (!uow.aborted && uow.committed);
}
}
// log.record(txn::provisionalCommit)
// examine log for conflicts in read-set and write-set between begin and
// provisional commit
// if (conflict) { throw new ConflictingUnitOfWorkException(this) }
// else return function so as to enable commit.andThen(() -> { do something iff
// commit was successful; })
if (canCommit) {
committed = true;
aborted = false;
nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
elapsedTime.stop();
if (parent == null) {
// Apply all post-commit functions, this is the outermost UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
uow.applyPostCommitFunctions();
});
// Merge our cache into the session cache.
session.mergeCache(cache);
return new PostCommitFunction(this, null);
} else {
// Merge cache and statistics into parent if there is one.
parent.mergeCache(cache);
if (purpose != null) {
parent.nestedPurposes.add(purpose);
}
parent.cacheHits += cacheHits;
parent.cacheMisses += cacheMisses;
parent.databaseLookups += databaseLookups;
parent.cacheLookupTime += cacheLookupTime;
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
String name = dt.getKey();
if (parent.databaseTime.containsKey(name)) {
double t = parent.databaseTime.get(name);
parent.databaseTime.put(name, t + dt.getValue());
} else {
parent.databaseTime.put(name, dt.getValue());
}
}
}
}
// else {
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
// T object = ctor.newInstance(new Object[] { String message });
// }
return new PostCommitFunction(this, postCommit);
}
/* Explicitly discard the work and mark it as such in the log. */
public synchronized void abort() {
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
uow.committed = false;
uow.aborted = true;
});
// log.record(txn::abort)
// cache.invalidateSince(txn::start time)
if (LOG.isInfoEnabled()) {
if (elapsedTime.isRunning()) {
elapsedTime.stop();
}
LOG.info(logTimers("aborted"));
}
}
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
from.rowMap()
.forEach(
(rowKey, columnMap) -> {
columnMap.forEach(
(columnKey, value) -> {
if (to.contains(rowKey, columnKey)) {
// TODO(gburd):...
to.put(
rowKey,
columnKey,
Either.left(
CacheUtil.merge(
to.get(rowKey, columnKey).getLeft(),
from.get(rowKey, columnKey).getLeft())));
} else {
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
}
});
});
}
public String describeConflicts() {
return "it's complex...";
}
@Override
public void close() throws E {
// Closing an AbstractUnitOfWork will abort iff we've not already aborted or
// committed this unit of work.
if (aborted == false && committed == false) {
abort();
}
}
public boolean hasAborted() {
return aborted;
}
public boolean hasCommitted() {
return committed;
}
}
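A minimal usage sketch of the unit-of-work lifecycle above. It is illustrative only and assumes a HelenusSession exposes a begin() factory for units of work, which this diff does not show:
// Hypothetical usage sketch (session.begin() and the logging are assumptions):
try (UnitOfWork uow = session.begin()) {
// reads and writes issued while the unit of work is open are cached per UOW
uow.commit()
.andThen(() -> {
// runs only once the outermost enclosing UnitOfWork has committed
});
} // close() calls abort() automatically if neither commit() nor abort() ran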


@@ -16,5 +16,8 @@
package net.helenus.core;
public enum AutoDdl {
VALIDATE,
UPDATE,
CREATE,
CREATE_DROP;
}


@@ -2,5 +2,5 @@ package net.helenus.core;
@FunctionalInterface
public interface CommitThunk {
void apply();
}


@@ -18,9 +18,9 @@ package net.helenus.core;
public class ConflictingUnitOfWorkException extends Exception {
final UnitOfWork uow;
ConflictingUnitOfWorkException(UnitOfWork uow) {
this.uow = uow;
}
}


@@ -15,13 +15,15 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Metadata;
import java.util.Optional;
import net.helenus.core.reflect.HelenusPropertyNode;
public interface DslInstantiator {
<E> E instantiate(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
Metadata metadata);
}


@@ -15,102 +15,101 @@
*/
package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import java.util.Objects;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.ColumnValuePreparer;
public final class Filter<V> {
private final HelenusPropertyNode node;
private final Postulate<V> postulate;
private Filter(HelenusPropertyNode node, Postulate<V> postulate) {
this.node = node;
this.postulate = postulate;
}
public static <V> Filter<V> equal(Getter<V> getter, V val) {
return create(getter, Operator.EQ, val);
}
public static <V> Filter<V> in(Getter<V> getter, V... vals) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(vals, "empty values");
if (vals.length == 0) {
throw new IllegalArgumentException("values array is empty");
}
for (int i = 0; i != vals.length; ++i) {
Objects.requireNonNull(vals[i], "value[" + i + "] is empty");
}
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
Postulate<V> postulate = Postulate.of(Operator.IN, vals);
return new Filter<V>(node, postulate);
}
public static <V> Filter<V> greaterThan(Getter<V> getter, V val) {
return create(getter, Operator.GT, val);
}
public static <V> Filter<V> lessThan(Getter<V> getter, V val) {
return create(getter, Operator.LT, val);
}
public static <V> Filter<V> greaterThanOrEqual(Getter<V> getter, V val) {
return create(getter, Operator.GTE, val);
}
public static <V> Filter<V> lessThanOrEqual(Getter<V> getter, V val) {
return create(getter, Operator.LTE, val);
}
public static <V> Filter<V> create(Getter<V> getter, Postulate<V> postulate) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(postulate, "empty operator");
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
return new Filter<V>(node, postulate);
}
public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(op, "empty op");
Objects.requireNonNull(val, "empty value");
if (op == Operator.IN) {
throw new IllegalArgumentException(
"invalid usage of the 'in' operator, use Filter.in() static method");
}
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
Postulate<V> postulate = Postulate.of(op, val);
return new Filter<V>(node, postulate);
}
public HelenusPropertyNode getNode() {
return node;
}
public Clause getClause(ColumnValuePreparer valuePreparer) {
return postulate.getClause(node, valuePreparer);
}
public V[] postulateValues() {
return postulate.values();
}
@Override
public String toString() {
return node.getColumnName() + postulate.toString();
}
}
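A hedged illustration of building filters with the getter-based API above; `person` stands for a hypothetical Helenus DSL instance of a mapped interface and is not defined in this diff:
Filter<String> byName = Filter.equal(person::name, "Alice");
Filter<Integer> byAge = Filter.in(person::age, 30, 31, 32);
Filter<Integer> adults = Filter.greaterThanOrEqual(person::age, 18);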


@@ -17,5 +17,5 @@ package net.helenus.core;
public interface Getter<V> {
V get();
}


@@ -15,17 +15,15 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import net.helenus.config.DefaultHelenusSettings;
import net.helenus.config.HelenusSettings;
import net.helenus.core.reflect.DslExportable;
@@ -35,161 +33,166 @@ import net.helenus.support.HelenusMappingException;
public final class Helenus {
private static final ConcurrentMap<Class<?>, Object> dslCache =
new ConcurrentHashMap<Class<?>, Object>();
private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
new ConcurrentHashMap<Class<?>, Metadata>();
private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
private static volatile HelenusSettings settings = new DefaultHelenusSettings();
private static volatile HelenusSession singleton;
private Helenus() {}
protected static void setSession(HelenusSession session) {
sessions.add(session);
singleton = session;
}
public static HelenusSession session() {
return singleton;
}
public static void shutdown() {
sessions.forEach(
(session) -> {
session.close();
sessions.remove(session);
});
dslCache.clear();
}
public static HelenusSettings settings() {
return settings;
}
public static HelenusSettings settings(HelenusSettings overrideSettings) {
HelenusSettings old = settings;
settings = overrideSettings;
return old;
}
public static SessionInitializer connect(Cluster cluster) {
Session session = cluster.connect();
return new SessionInitializer(session);
}
public static SessionInitializer connect(Cluster cluster, String keyspace) {
Session session = cluster.connect(keyspace);
return new SessionInitializer(session);
}
public static SessionInitializer init(Session session) {
if (session == null) {
throw new IllegalArgumentException("empty session");
}
return new SessionInitializer(session);
}
public static void clearDslCache() {
dslCache.clear();
}
public static <E> E dsl(Class<E> iface) {
return dsl(iface, null);
}
public static <E> E dsl(Class<E> iface, Metadata metadata) {
return dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
}
public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Metadata metadata) {
return dsl(iface, classLoader, Optional.empty(), metadata);
}
public static <E> E dsl(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
Metadata metadata) {
Object instance = null;
if (!parent.isPresent()) {
instance = dslCache.get(iface);
}
if (instance == null) {
instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent, metadata);
if (!parent.isPresent()) {
Object c = dslCache.putIfAbsent(iface, instance);
if (c != null) {
instance = c;
}
}
}
return (E) instance;
}
public static <E> E map(Class<E> iface, Map<String, Object> src) {
return map(iface, src, iface.getClassLoader());
}
public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
}
public static HelenusEntity entity(Class<?> iface) {
Metadata metadata = metadataForEntity.get(iface);
if (metadata == null) {
HelenusSession session = session();
if (session != null) {
metadata = session.getMetadata();
}
}
return entity(iface, metadata);
}
public static HelenusEntity entity(Class<?> iface, Metadata metadata) {
Object dsl = dsl(iface, metadata);
DslExportable e = (DslExportable) dsl;
return e.getHelenusMappingEntity();
}
public static HelenusEntity resolve(Object ifaceOrDsl) {
return resolve(ifaceOrDsl, metadataForEntity.get(ifaceOrDsl));
}
public static HelenusEntity resolve(Object ifaceOrDsl, Metadata metadata) {
if (ifaceOrDsl == null) {
throw new HelenusMappingException("ifaceOrDsl is null");
}
if (ifaceOrDsl instanceof DslExportable) {
DslExportable e = (DslExportable) ifaceOrDsl;
return e.getHelenusMappingEntity();
}
if (ifaceOrDsl instanceof Class) {
Class<?> iface = (Class<?>) ifaceOrDsl;
if (!iface.isInterface()) {
throw new HelenusMappingException("class is not an interface " + iface);
}
if (metadata != null) {
metadataForEntity.putIfAbsent(iface, metadata);
}
return entity(iface, metadata);
}
throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
}
}
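A rough bootstrap sketch using only the factory methods shown above; the contact point, keyspace name, and Person interface are illustrative assumptions rather than part of this code:
Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
SessionInitializer init = Helenus.connect(cluster, "my_keyspace");
Person person = Helenus.dsl(Person.class); // dsl instances are cached per interface in dslCache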

File diff suppressed because it is too large


@@ -16,33 +16,32 @@
package net.helenus.core;
import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
public enum HelenusValidator implements PropertyValueValidator {
INSTANCE;
public void validate(HelenusProperty prop, Object value) {
for (ConstraintValidator<? extends Annotation, ?> validator : prop.getValidators()) {
ConstraintValidator typeless = (ConstraintValidator) validator;
boolean valid = false;
try {
valid = typeless.isValid(value, null);
} catch (ClassCastException e) {
throw new HelenusMappingException(
"validator was used for wrong type '" + value + "' in " + prop, e);
}
if (!valid) {
throw new HelenusException("wrong value '" + value + "' for " + prop);
}
}
}
}


@@ -19,5 +19,5 @@ import java.util.Map;
public interface MapperInstantiator {
<E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
}


@@ -15,10 +15,8 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Row;
import java.util.function.Function;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
@@ -26,161 +24,203 @@ import net.helenus.support.Fun;
public final class Mappers {
private Mappers() {}
public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1;
public Mapper1(ColumnValueProvider provider, HelenusPropertyNode p1) {
this.provider = provider;
this.p1 = p1.getProperty();
}
@Override
public Fun.Tuple1<A> apply(Row row) {
return new Fun.Tuple1<A>(provider.getColumnValue(row, 0, p1));
}
}
public static final class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1;
private final HelenusProperty p2;
public Mapper2(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
}
@Override
public Fun.Tuple2<A, B> apply(Row row) {
return new Fun.Tuple2<A, B>(
provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
}
}
public static final class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1;
private final HelenusProperty p2;
private final HelenusProperty p3;
public Mapper3(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
this.p3 = p3.getProperty();
}
@Override
public Fun.Tuple3<A, B, C> apply(Row row) {
return new Fun.Tuple3<A, B, C>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3));
}
}
public static final class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1;
private final HelenusProperty p2;
private final HelenusProperty p3;
private final HelenusProperty p4;
public Mapper4(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
this.p3 = p3.getProperty();
this.p4 = p4.getProperty();
}
@Override
public Fun.Tuple4<A, B, C, D> apply(Row row) {
return new Fun.Tuple4<A, B, C, D>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4));
}
}
public static final class Mapper5<A, B, C, D, E>
implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5;
public Mapper5(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
this.p3 = p3.getProperty();
this.p4 = p4.getProperty();
this.p5 = p5.getProperty();
}
@Override
public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
return new Fun.Tuple5<A, B, C, D, E>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5));
}
}
public static final class Mapper6<A, B, C, D, E, F>
implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5, p6;
public Mapper6(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5,
HelenusPropertyNode p6) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
this.p3 = p3.getProperty();
this.p4 = p4.getProperty();
this.p5 = p5.getProperty();
this.p6 = p6.getProperty();
}
@Override
public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
return new Fun.Tuple6<A, B, C, D, E, F>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5),
provider.getColumnValue(row, 5, p6));
}
}
public static final class Mapper7<A, B, C, D, E, F, G>
implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
private final ColumnValueProvider provider;
private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;
public Mapper7(
ColumnValueProvider provider,
HelenusPropertyNode p1,
HelenusPropertyNode p2,
HelenusPropertyNode p3,
HelenusPropertyNode p4,
HelenusPropertyNode p5,
HelenusPropertyNode p6,
HelenusPropertyNode p7) {
this.provider = provider;
this.p1 = p1.getProperty();
this.p2 = p2.getProperty();
this.p3 = p3.getProperty();
this.p4 = p4.getProperty();
this.p5 = p5.getProperty();
this.p6 = p6.getProperty();
this.p7 = p7.getProperty();
}
@Override
public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
return new Fun.Tuple7<A, B, C, D, E, F, G>(
provider.getColumnValue(row, 0, p1),
provider.getColumnValue(row, 1, p2),
provider.getColumnValue(row, 2, p3),
provider.getColumnValue(row, 3, p4),
provider.getColumnValue(row, 4, p5),
provider.getColumnValue(row, 5, p6),
provider.getColumnValue(row, 6, p7));
}
}
}


@@ -19,37 +19,37 @@ import java.util.HashMap;
import java.util.Map;
public enum Operator {
EQ("=="),
IN("in"),
GT(">"),
LT("<"),
GTE(">="),
LTE("<=");
private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();
static {
for (Operator fo : Operator.values()) {
indexByName.put(fo.getName(), fo);
}
}
private final String name;
private Operator(String name) {
this.name = name;
}
public static Operator findByOperator(String name) {
return indexByName.get(name);
}
public String getName() {
return name;
}
}


@@ -1,10 +1,8 @@
package net.helenus.core;
import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.Objects;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;
@@ -13,34 +11,34 @@ import net.helenus.support.HelenusMappingException;
public final class Ordered {
private final Getter<?> getter;
private final OrderingDirection direction;
public Ordered(Getter<?> getter, OrderingDirection direction) {
this.getter = getter;
this.direction = direction;
}
public Ordering getOrdering() {
Objects.requireNonNull(getter, "property is null");
Objects.requireNonNull(direction, "direction is null");
HelenusPropertyNode propNode = MappingUtil.resolveMappingProperty(getter);
if (propNode.getProperty().getColumnType() != ColumnType.CLUSTERING_COLUMN) {
throw new HelenusMappingException(
"property must be a clustering column " + propNode.getProperty().getPropertyName());
}
switch (direction) {
case ASC:
return QueryBuilder.asc(propNode.getColumnName());
case DESC:
return QueryBuilder.desc(propNode.getColumnName());
}
throw new HelenusMappingException("invalid direction " + direction);
}
}


@@ -5,25 +5,25 @@ import java.util.Objects;
public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {
private final UnitOfWork uow;
private final List<CommitThunk> postCommit;
PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) {
this.uow = uow;
this.postCommit = postCommit;
}
public void andThen(CommitThunk after) {
Objects.requireNonNull(after);
if (postCommit == null) {
after.apply();
} else {
postCommit.add(after);
}
}
@Override
public R apply(T t) {
return null;
}
}
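Behavior sketch for andThen(): a PostCommitFunction constructed with a null postCommit list applies the thunk immediately (the outermost unit of work has already committed), while one constructed with the live list defers the thunk until applyPostCommitFunctions() runs. Illustrative call, assuming `uow` is an open UnitOfWork and LOG is a logger:
uow.commit().andThen(() -> LOG.info("all enclosing units of work committed"));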


@@ -17,85 +17,84 @@ package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;
public final class Postulate<V> {
private final Operator operator;
private final V[] values;
protected Postulate(Operator op, V[] values) {
this.operator = op;
this.values = values;
}
public static <V> Postulate<V> of(Operator op, V... values) {
return new Postulate<V>(op, values);
}
public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {
switch (operator) {
case EQ:
return QueryBuilder.eq(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case IN:
Object[] preparedValues = new Object[values.length];
for (int i = 0; i != values.length; ++i) {
preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
}
return QueryBuilder.in(node.getColumnName(), preparedValues);
case LT:
return QueryBuilder.lt(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case LTE:
return QueryBuilder.lte(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GT:
return QueryBuilder.gt(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
case GTE:
return QueryBuilder.gte(
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
default:
throw new HelenusMappingException("unknown filter operation " + operator);
}
}
public V[] values() {
return values;
}
@Override
public String toString() {
if (operator == Operator.IN) {
if (values == null) {
return "in()";
}
int len = values.length;
StringBuilder b = new StringBuilder();
b.append("in(");
for (int i = 0; i != len; i++) {
if (b.length() > 3) {
b.append(", ");
}
b.append(String.valueOf(values[i]));
}
return b.append(')').toString();
}
return operator.getName() + values[0];
}
}


@@ -19,5 +19,5 @@ import net.helenus.mapping.HelenusProperty;
public interface PropertyValueValidator {
void validate(HelenusProperty prop, Object value);
}


@@ -15,83 +15,80 @@
*/
package net.helenus.core;
import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import net.helenus.mapping.OrderingDirection;
/** Sugar methods for the queries */
public final class Query {
private Query() {}
public static BindMarker marker() {
return QueryBuilder.bindMarker();
}
public static BindMarker marker(String name) {
return QueryBuilder.bindMarker(name);
}
public static Ordered asc(Getter<?> getter) {
return new Ordered(getter, OrderingDirection.ASC);
}
public static Ordered desc(Getter<?> getter) {
return new Ordered(getter, OrderingDirection.DESC);
}
public static <V> Postulate<V> eq(V val) {
return Postulate.of(Operator.EQ, val);
}
public static <V> Postulate<V> lt(V val) {
return Postulate.of(Operator.LT, val);
}
public static <V> Postulate<V> lte(V val) {
return Postulate.of(Operator.LTE, val);
}
public static <V> Postulate<V> gt(V val) {
return Postulate.of(Operator.GT, val);
}
public static <V> Postulate<V> gte(V val) {
return Postulate.of(Operator.GTE, val);
}
public static <V> Postulate<V> in(V[] vals) {
return new Postulate<V>(Operator.IN, vals);
}
public static <K, V> Getter<V> getIdx(Getter<List<V>> listGetter, int index) {
Objects.requireNonNull(listGetter, "listGetter is null");
return new Getter<V>() {
@Override
public V get() {
return listGetter.get().get(index);
}
};
}
public static <K, V> Getter<V> get(Getter<Map<K, V>> mapGetter, K k) {
Objects.requireNonNull(mapGetter, "mapGetter is null");
Objects.requireNonNull(k, "key is null");
return new Getter<V>() {
@Override
public V get() {
return mapGetter.get().get(k);
}
};
}
}
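A short, hypothetical example of the sugar methods above; `person` is again an assumed Helenus DSL instance with a createdAt getter:
Postulate<Integer> adult = Query.gte(18);
Ordered newestFirst = Query.desc(person::createdAt);
BindMarker ageParam = Query.marker("age");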


@@ -15,16 +15,14 @@
*/
package net.helenus.core;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;
import java.util.*;
import java.util.stream.Collectors;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
@@ -35,392 +33,428 @@ import net.helenus.support.HelenusMappingException;
public final class SchemaUtil {
private SchemaUtil() {}
public static RegularStatement use(String keyspace, boolean forceQuote) {
if (forceQuote) {
return new SimpleStatement("USE" + CqlUtil.forceQuote(keyspace));
} else {
return new SimpleStatement("USE " + keyspace);
}
}
public static SchemaStatement createUserType(HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.UDT) {
throw new HelenusMappingException("expected UDT entity " + entity);
}
CreateType create = SchemaBuilder.createType(entity.getName().toCql());
for (HelenusProperty prop : entity.getOrderedProperties()) {
ColumnType columnType = prop.getColumnType();
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
throw new HelenusMappingException(
"primary key columns are not supported in UserDefinedType for "
+ prop.getPropertyName()
+ " in entity "
+ entity);
}
try {
prop.getDataType().addColumn(create, prop.getColumnName());
} catch (IllegalArgumentException e) {
throw new HelenusMappingException(
"invalid column name '"
+ prop.getColumnName()
+ "' in entity '"
+ entity.getName().getName()
+ "'",
e);
}
}
return create;
}
public static List<SchemaStatement> alterUserType(
UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {
if (entity.getType() != HelenusEntityType.UDT) {
throw new HelenusMappingException("expected UDT entity " + entity);
}
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
/**
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
* exist
*/
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
final Set<String> visitedColumns =
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
for (HelenusProperty prop : entity.getOrderedProperties()) {
String columnName = prop.getColumnName().getName();
if (dropUnusedColumns) {
visitedColumns.add(columnName);
}
ColumnType columnType = prop.getColumnType();
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
continue;
}
DataType dataType = userType.getFieldType(columnName);
SchemaStatement stmt =
prop.getDataType()
.alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));
if (stmt != null) {
result.add(stmt);
}
}
if (dropUnusedColumns) {
for (String field : userType.getFieldNames()) {
if (!visitedColumns.contains(field)) {
result.add(alter.dropColumn(field));
}
}
}
return result;
}
public static SchemaStatement dropUserType(HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.UDT) {
throw new HelenusMappingException("expected UDT entity " + entity);
}
return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
}
public static SchemaStatement dropUserType(UserType type) {
return SchemaBuilder.dropType(type.getTypeName()).ifExists();
}
public static String createPrimaryKeyPhrase(Collection<HelenusProperty> properties) {
List<String> p = new ArrayList<String>(properties.size());
List<String> c = new ArrayList<String>(properties.size());
for (HelenusProperty prop : properties) {
String columnName = prop.getColumnName().toCql();
switch (prop.getColumnType()) {
case PARTITION_KEY:
p.add(columnName);
break;
case CLUSTERING_COLUMN:
c.add(columnName);
break;
default:
break;
}
}
return "("
+ ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
+ ((c.size() > 0)
? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
: "")
+ ")";
}
public static SchemaStatement createMaterializedView(
String keyspace, String viewName, HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.VIEW) {
throw new HelenusMappingException("expected view entity " + entity);
}
List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> props.add(p));
Select.Selection selection = QueryBuilder.select();
for (HelenusPropertyNode prop : props) {
String columnName = prop.getColumnName();
selection = selection.column(columnName);
}
Class<?> iface = entity.getMappingInterface();
String tableName = Helenus.entity(iface.getInterfaces()[0]).getName().toCql();
Select.Where where = selection.from(tableName).where();
List<String> o = new ArrayList<String>(props.size());
for (HelenusPropertyNode prop : props) {
String columnName = prop.getColumnName();
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
where = where.and(new IsNotNullClause(columnName));
break;
case CLUSTERING_COLUMN:
where = where.and(new IsNotNullClause(columnName));
ClusteringColumn clusteringColumn =
prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
if (clusteringColumn != null && clusteringColumn.ordering() != null) {
o.add(columnName + " " + clusteringColumn.ordering().cql());
}
break;
default:
break;
}
}
String primaryKey = "PRIMARY KEY " + createPrimaryKeyPhrase(entity.getOrderedProperties());
String clustering = "";
if (o.size() > 0) {
clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
}
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering)
.ifNotExists();
}
public static SchemaStatement dropMaterializedView(
String keyspace, String viewName, HelenusEntity entity) {
return new DropMaterializedView(keyspace, viewName);
}
public static SchemaStatement createTable(HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.TABLE) {
throw new HelenusMappingException("expected table entity " + entity);
}
// NOTE: There is a bug in the normal path of createTable where the
// "cache" is set too early and never unset preventing more than
// one column on a table.
// SchemaBuilder.createTable(entity.getName().toCql());
CreateTable create = new CreateTable(entity.getName().toCql());
create.ifNotExists();
List<HelenusProperty> clusteringColumns = new ArrayList<HelenusProperty>();
for (HelenusProperty prop : entity.getOrderedProperties()) {
ColumnType columnType = prop.getColumnType();
if (columnType == ColumnType.CLUSTERING_COLUMN) {
clusteringColumns.add(prop);
}
prop.getDataType().addColumn(create, prop.getColumnName());
}
if (!clusteringColumns.isEmpty()) {
Options options = create.withOptions();
clusteringColumns.forEach(
p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
}
return create;
}
public static List<SchemaStatement> alterTable(
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
if (entity.getType() != HelenusEntityType.TABLE) {
throw new HelenusMappingException("expected table entity " + entity);
}
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
final Set<String> visitedColumns =
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
for (HelenusProperty prop : entity.getOrderedProperties()) {
String columnName = prop.getColumnName().getName();
if (dropUnusedColumns) {
visitedColumns.add(columnName);
}
ColumnType columnType = prop.getColumnType();
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
continue;
}
ColumnMetadata columnMetadata = tmd.getColumn(columnName);
SchemaStatement stmt =
prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));
if (stmt != null) {
result.add(stmt);
}
}
if (dropUnusedColumns) {
for (ColumnMetadata cm : tmd.getColumns()) {
if (!visitedColumns.contains(cm.getName())) {
result.add(alter.dropColumn(cm.getName()));
}
}
}
return result;
}
public static SchemaStatement dropTable(HelenusEntity entity) {
if (entity.getType() != HelenusEntityType.TABLE) {
throw new HelenusMappingException("expected table entity " + entity);
}
return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
}
public static SchemaStatement createIndex(HelenusProperty prop) {
if (prop.caseSensitiveIndex()) {
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
.ifNotExists()
.onTable(prop.getEntity().getName().toCql())
.andColumn(prop.getColumnName().toCql());
} else {
return new CreateSasiIndex(prop.getIndexName().get().toCql())
.ifNotExists()
.onTable(prop.getEntity().getName().toCql())
.andColumn(prop.getColumnName().toCql());
}
}
public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
return entity
.getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.map(p -> SchemaUtil.createIndex(p))
.collect(Collectors.toList());
}
public static List<SchemaStatement> alterIndexes(
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {
List<SchemaStatement> list = new ArrayList<SchemaStatement>();
final Set<String> visitedColumns =
dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
entity
.getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.forEach(
p -> {
String columnName = p.getColumnName().getName();
if (dropUnusedIndexes) {
visitedColumns.add(columnName);
}
ColumnMetadata cm = tmd.getColumn(columnName);
if (cm != null) {
IndexMetadata im = tmd.getIndex(columnName);
if (im == null) {
list.add(createIndex(p));
}
} else {
list.add(createIndex(p));
}
});
if (dropUnusedIndexes) {
tmd.getColumns()
.stream()
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
.forEach(
c -> {
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
});
}
return list;
}
public static SchemaStatement dropIndex(HelenusProperty prop) {
return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
}
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
switch (o) {
case ASC:
return SchemaBuilder.Direction.ASC;
case DESC:
return SchemaBuilder.Direction.DESC;
}
throw new HelenusMappingException("unknown ordering " + o);
}
public static void throwNoMapping(HelenusProperty prop) {
throw new HelenusMappingException(
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
+ prop.getPropertyName()
+ "' type is '"
+ prop.getJavaType()
+ "' in the entity "
+ prop.getEntity());
}
private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
if (columnMetadata != null) {
return new OptionalColumnMetadata() {
@Override
public String getName() {
return columnMetadata.getName();
}
@Override
public DataType getType() {
return columnMetadata.getType();
}
};
}
return null;
}
private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
if (dataType != null) {
return new OptionalColumnMetadata() {
@Override
public String getName() {
return name;
}
@Override
public DataType getType() {
return dataType;
}
};
}
return null;
}
}
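// Putting the pieces of createMaterializedView together, the emitted statement has roughly this
// shape (keyspace, view, table and column names below are illustrative, not taken from this
// change):
//
//   CREATE MATERIALIZED VIEW IF NOT EXISTS ks.cyclist_by_age AS
//     SELECT age, cid, name FROM cyclist
//     WHERE age IS NOT NULL AND cid IS NOT NULL
//     PRIMARY KEY (age, cid)
//   WITH CLUSTERING ORDER BY (cid ASC);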


@ -15,18 +15,16 @@
*/
package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.io.PrintStream;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import net.helenus.core.cache.SessionCache;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
@ -40,350 +38,405 @@ import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations {
private final Session session;
private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
private CodecRegistry registry;
private String usingKeyspace;
private boolean showCql = false;
private ConsistencyLevel consistencyLevel;
private boolean idempotent = true;
private MetricRegistry metricRegistry = new MetricRegistry();
private Tracer zipkinTracer;
private PrintStream printStream = System.out;
private Executor executor = MoreExecutors.directExecutor();
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
private SessionRepositoryBuilder sessionRepository;
private boolean dropUnusedColumns = false;
private boolean dropUnusedIndexes = false;
private KeyspaceMetadata keyspaceMetadata;
private AutoDdl autoDdl = AutoDdl.UPDATE;
private SessionCache sessionCache = null;
SessionInitializer(Session session) {
this.session = Objects.requireNonNull(session, "empty session");
this.usingKeyspace = session.getLoggedKeyspace(); // can be null
this.sessionRepository = new SessionRepositoryBuilder(session);
}
@Override
public Session currentSession() {
return session;
}
@Override
public String usingKeyspace() {
return usingKeyspace;
}
@Override
public Executor getExecutor() {
return executor;
}
@Override
public SessionRepository getSessionRepository() {
throw new HelenusException("not expected to call");
}
@Override
public ColumnValueProvider getValueProvider() {
throw new HelenusException("not expected to call");
}
@Override
public ColumnValuePreparer getValuePreparer() {
throw new HelenusException("not expected to call");
}
public SessionInitializer showCql() {
this.showCql = true;
return this;
}
public SessionInitializer showCql(boolean enabled) {
this.showCql = enabled;
return this;
}
public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
this.metricRegistry = metricRegistry;
return this;
}
public SessionInitializer zipkinTracer(Tracer tracer) {
this.zipkinTracer = tracer;
return this;
}
public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
this.unitOfWorkClass = e;
return this;
}
public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
this.consistencyLevel = consistencyLevel;
return this;
}
public SessionInitializer setSessionCache(SessionCache sessionCache) {
this.sessionCache = sessionCache;
return this;
}
public ConsistencyLevel getDefaultConsistencyLevel() {
return consistencyLevel;
}
public SessionInitializer idempotentQueryExecution(boolean idempotent) {
this.idempotent = idempotent;
return this;
}
public boolean getDefaultQueryIdempotency() {
return idempotent;
}
@Override
public PrintStream getPrintStream() {
return printStream;
}
public SessionInitializer printTo(PrintStream out) {
this.printStream = out;
return this;
}
public SessionInitializer withExecutor(Executor executor) {
Objects.requireNonNull(executor, "empty executor");
this.executor = executor;
return this;
}
public SessionInitializer withCachingExecutor() {
this.executor = Executors.newCachedThreadPool();
return this;
}
public SessionInitializer dropUnusedColumns(boolean enabled) {
this.dropUnusedColumns = enabled;
return this;
}
public SessionInitializer dropUnusedIndexes(boolean enabled) {
this.dropUnusedIndexes = enabled;
return this;
}
public SessionInitializer withCodecRegistry(CodecRegistry registry) {
this.registry = registry;
return this;
}
@Override
public boolean isShowCql() {
return showCql;
}
public SessionInitializer addPackage(String packageName) {
try {
PackageUtil.getClasses(packageName)
.stream()
.filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(
clazz -> {
initList.add(Either.right(clazz));
});
} catch (IOException | ClassNotFoundException e) {
throw new HelenusException("fail to add package " + packageName, e);
}
return this;
}
public SessionInitializer add(Object... dsls) {
Objects.requireNonNull(dsls, "dsls is empty");
int len = dsls.length;
for (int i = 0; i != len; ++i) {
Object obj = Objects.requireNonNull(dsls[i], "element " + i + " is empty");
initList.add(Either.left(obj));
}
return this;
}
public SessionInitializer autoValidate() {
this.autoDdl = AutoDdl.VALIDATE;
return this;
}
public SessionInitializer autoUpdate() {
this.autoDdl = AutoDdl.UPDATE;
return this;
}
public SessionInitializer autoCreate() {
this.autoDdl = AutoDdl.CREATE;
return this;
}
public SessionInitializer autoCreateDrop() {
this.autoDdl = AutoDdl.CREATE_DROP;
return this;
}
public SessionInitializer auto(AutoDdl autoDdl) {
this.autoDdl = autoDdl;
return this;
}
public SessionInitializer use(String keyspace) {
session.execute(SchemaUtil.use(keyspace, false));
this.usingKeyspace = keyspace;
return this;
}
public SessionInitializer use(String keyspace, boolean forceQuote) {
session.execute(SchemaUtil.use(keyspace, forceQuote));
this.usingKeyspace = keyspace;
return this;
}
public void singleton() {
Helenus.setSession(get());
}
public synchronized HelenusSession get() {
initialize();
return new HelenusSession(
session,
usingKeyspace,
registry,
showCql,
printStream,
sessionRepository,
executor,
autoDdl == AutoDdl.CREATE_DROP,
consistencyLevel,
idempotent,
unitOfWorkClass,
sessionCache,
metricRegistry,
zipkinTracer);
}
private void initialize() {
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
initList.forEach(
(either) -> {
Class<?> iface = null;
if (either.isLeft()) {
iface = MappingUtil.getMappingInterface(either.getLeft());
} else {
iface = either.getRight();
}
DslExportable dsl = (DslExportable) Helenus.dsl(iface);
dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
sessionRepository.add(dsl);
});
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
switch (autoDdl) {
case CREATE_DROP:
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
// referenced
// by a view.
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.dropView(e));
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
// the type is
// still referenced by a table.
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.dropTable(e));
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
// intentional!)
case CREATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.createTable(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.createView(e));
break;
case VALIDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
break;
case UPDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.dropView(e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.VIEW)
.forEach(e -> tableOps.createView(e));
break;
}
KeyspaceMetadata km = getKeyspaceMetadata();
for (UserType userType : km.getUserTypes()) {
sessionRepository.addUserType(userType.getTypeName(), userType);
}
}
private void eachUserTypeInOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.UDT)
.forEach(
e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
}
private void eachUserTypeInReverseOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
deque
.stream()
.forEach(
e -> {
action.accept(e);
});
}
private void eachUserTypeInRecursion(
HelenusEntity e,
Set<HelenusEntity> processedSet,
Set<HelenusEntity> stack,
UserTypeOperations userTypeOps,
Consumer<? super HelenusEntity> action) {
stack.add(e);
Collection<HelenusEntity> createBefore = sessionRepository.getUserTypeUses(e);
for (HelenusEntity be : createBefore) {
if (!processedSet.contains(be) && !stack.contains(be)) {
eachUserTypeInRecursion(be, processedSet, stack, userTypeOps, action);
processedSet.add(be);
}
}
if (!processedSet.contains(e)) {
action.accept(e);
processedSet.add(e);
}
}
private KeyspaceMetadata getKeyspaceMetadata() {
if (keyspaceMetadata == null) {
keyspaceMetadata =
session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
}
return keyspaceMetadata;
}
private TableMetadata getTableMetadata(HelenusEntity entity) {
return getKeyspaceMetadata().getTable(entity.getName().getName());
}
private UserType getUserType(HelenusEntity entity) {
return getKeyspaceMetadata().getUserType(entity.getName().getName());
}
}
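// A typical bootstrap with the fluent API above (a sketch; Helenus.init(...) returning this
// SessionInitializer and the package name are assumptions, not part of this change):
HelenusSession helenusSession =
Helenus.init(cassandraSession)      // a connected com.datastax.driver.core.Session
.use("my_keyspace")                 // issues USE and records the working keyspace
.showCql()                          // print generated CQL to the configured PrintStream
.addPackage("com.example.model")    // register every mapping interface found in the package
.autoUpdate()                       // initialize() alters tables, types and indexes to match
.get();                             // builds the HelenusSession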


@ -15,31 +15,30 @@
*/
package net.helenus.core;
import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import net.helenus.mapping.HelenusEntity;
public final class SessionRepository {
private final ImmutableMap<String, UserType> userTypeMap;
private final ImmutableMap<Class<?>, HelenusEntity> entityMap;
public SessionRepository(SessionRepositoryBuilder builder) {
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
entityMap =
ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
}
public UserType findUserType(String name) {
return userTypeMap.get(name.toLowerCase());
}
public Collection<HelenusEntity> entities() {
return entityMap.values();
}
}


@ -15,17 +15,15 @@
*/
package net.helenus.core;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
@ -35,110 +33,112 @@ import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder {
private static final Optional<HelenusEntityType> OPTIONAL_UDT =
Optional.of(HelenusEntityType.UDT);
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
private final Map<String, UserType> userTypeMap = new HashMap<String, UserType>();
private final Multimap<HelenusEntity, HelenusEntity> userTypeUsesMap = HashMultimap.create();
private final Session session;
SessionRepositoryBuilder(Session session) {
this.session = session;
}
public SessionRepository build() {
return new SessionRepository(this);
}
public Collection<HelenusEntity> getUserTypeUses(HelenusEntity udtName) {
return userTypeUsesMap.get(udtName);
}
public Collection<HelenusEntity> entities() {
return entityMap.values();
}
protected Map<Class<?>, HelenusEntity> getEntityMap() {
return entityMap;
}
protected Map<String, UserType> getUserTypeMap() {
return userTypeMap;
}
public void addUserType(String name, UserType userType) {
userTypeMap.putIfAbsent(name.toLowerCase(), userType);
}
public HelenusEntity add(Object dsl) {
return add(dsl, Optional.empty());
}
public void addEntity(HelenusEntity entity) {
HelenusEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
if (concurrentEntity == null) {
addUserDefinedTypes(entity.getOrderedProperties());
}
}
public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
HelenusEntity helenusEntity = Helenus.resolve(dsl, session.getCluster().getMetadata());
Class<?> iface = helenusEntity.getMappingInterface();
HelenusEntity entity = entityMap.get(iface);
if (entity == null) {
entity = helenusEntity;
if (type.isPresent() && entity.getType() != type.get()) {
throw new HelenusMappingException(
"unexpected entity type " + entity.getType() + " for " + entity);
}
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
if (concurrentEntity == null) {
addUserDefinedTypes(entity.getOrderedProperties());
} else {
entity = concurrentEntity;
}
}
return entity;
}
private void addUserDefinedTypes(Collection<HelenusProperty> props) {
for (HelenusProperty prop : props) {
AbstractDataType type = prop.getDataType();
if (type instanceof DTDataType) {
continue;
}
if (!UDTValue.class.isAssignableFrom(prop.getJavaType())) {
for (Class<?> udtClass : type.getTypeArguments()) {
if (UDTValue.class.isAssignableFrom(udtClass)) {
continue;
}
HelenusEntity addedUserType = add(udtClass, OPTIONAL_UDT);
if (HelenusEntityType.UDT == prop.getEntity().getType()) {
userTypeUsesMap.put(prop.getEntity(), addedUserType);
}
}
}
}
}
}
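// Note on ordering: when a property of a UDT entity embeds another UDT, add(...) registers the
// embedded type as well and records the edge in userTypeUsesMap; SessionInitializer's
// eachUserTypeInOrder/eachUserTypeInRecursion walk those edges so embedded types are created
// first. A hedged sketch (annotation names assumed, not part of this change):
//
//   @UDT public interface Coordinates { @Column double lat(); @Column double lon(); }
//   @UDT public interface Address {
//     @Column String street();
//     @Column Coordinates location();  // Address uses Coordinates, so Coordinates is created first
//   }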


@ -15,88 +15,98 @@
*/
package net.helenus.core;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
public final class TableOperations {
private final AbstractSessionOperations sessionOps;
private final boolean dropUnusedColumns;
private final boolean dropUnusedIndexes;
public TableOperations(
AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns;
this.dropUnusedIndexes = dropUnusedIndexes;
}
public void createTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createTable(entity), true);
executeBatch(SchemaUtil.createIndexes(entity));
}
public void dropTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropTable(entity), true);
}
public void validateTable(TableMetadata tmd, HelenusEntity entity) {
if (tmd == null) {
throw new HelenusException(
"table does not exists "
+ entity.getName()
+ "for entity "
+ entity.getMappingInterface());
}
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
list.addAll(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
if (!list.isEmpty()) {
throw new HelenusException(
"schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
}
}
public void updateTable(TableMetadata tmd, HelenusEntity entity) {
if (tmd == null) {
createTable(entity);
return;
}
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
}
public void createView(HelenusEntity entity) {
sessionOps.execute(
SchemaUtil.createMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
true);
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
// does not yet support 2i on materialized views.
}
public void dropView(HelenusEntity entity) {
sessionOps.execute(
SchemaUtil.dropMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
true);
}
public void updateView(TableMetadata tmd, HelenusEntity entity) {
if (tmd == null) {
createTable(entity);
return;
}
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
}
private void executeBatch(List<SchemaStatement> list) {
list.forEach(
s -> {
sessionOps.execute(s, true);
});
}
}
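// Sketch of the per-entity update path driven by SessionInitializer (sessionOps,
// keyspaceMetadata and entity are placeholders here, not names from this change):
TableOperations tableOps = new TableOperations(sessionOps, true, true);
TableMetadata tmd = keyspaceMetadata.getTable(entity.getName().getName());  // null if the table is missing
tableOps.updateTable(tmd, entity);  // creates the table when tmd == null, otherwise alters columns and indexes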


@ -15,59 +15,57 @@
*/
package net.helenus.core;
import com.google.common.base.Stopwatch;
import java.util.List;
import java.util.Optional;
import net.helenus.core.cache.Facet;
public interface UnitOfWork<X extends Exception> extends AutoCloseable {
/**
* Marks the beginning of a transactional section of work. Will write a
* recordCacheAndDatabaseOperationCount to the shared write-ahead log.
*
* @return the handle used to commit or abort the work.
*/
UnitOfWork<X> begin();
void addNestedUnitOfWork(UnitOfWork<X> uow);
/**
* Checks to see if the work performed between calling begin and now can be committed or not.
*
* @return a function from which to chain work that only happens when commit is successful
* @throws X when the work overlaps with other concurrent writers.
*/
PostCommitFunction<Void, Void> commit() throws X;
/**
* Explicitly abort the work within this unit of work. Any nested aborted unit of work will
* trigger the entire unit of work to commit.
*/
void abort();
boolean hasAborted();
boolean hasCommitted();
Optional<Object> cacheLookup(List<Facet> facets);
void cacheUpdate(Object pojo, List<Facet> facets);
List<Facet> cacheEvict(List<Facet> facets);
String getPurpose();
UnitOfWork setPurpose(String purpose);
void setInfo(String info);
void addDatabaseTime(String name, Stopwatch amount);
void addCacheLookupTime(Stopwatch amount);
// Cache > 0 means "cache hit", < 0 means cache miss.
void recordCacheAndDatabaseOperationCount(int cache, int database);
}
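// Sketch of intended usage (obtaining the unit of work from the session and the name of the
// post-commit chaining method are assumptions, not confirmed by this change):
UnitOfWork<HelenusException> uow = session.begin();
try {
// reads may be served from the unit-of-work cache; writes are merged into it on success
uow.commit().andThen(() -> {
// runs only when the commit succeeded
});
} catch (HelenusException conflict) {
uow.abort();  // overlapping concurrent writers; give up this unit of work
}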


@ -19,8 +19,8 @@ import net.helenus.support.HelenusException;
class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> {
@SuppressWarnings("unchecked")
public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) {
super(session, (AbstractUnitOfWork<HelenusException>) parent);
}
}


@ -15,63 +15,65 @@
*/ */
package net.helenus.core; package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.UserType; import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement; import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
public final class UserTypeOperations { public final class UserTypeOperations {
private final AbstractSessionOperations sessionOps; private final AbstractSessionOperations sessionOps;
private final boolean dropUnusedColumns; private final boolean dropUnusedColumns;
public UserTypeOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns) { public UserTypeOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns) {
this.sessionOps = sessionOps; this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns; this.dropUnusedColumns = dropUnusedColumns;
} }
public void createUserType(HelenusEntity entity) { public void createUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createUserType(entity), true); sessionOps.execute(SchemaUtil.createUserType(entity), true);
} }
public void dropUserType(HelenusEntity entity) { public void dropUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropUserType(entity), true); sessionOps.execute(SchemaUtil.dropUserType(entity), true);
} }
public void validateUserType(UserType userType, HelenusEntity entity) { public void validateUserType(UserType userType, HelenusEntity entity) {
if (userType == null) { if (userType == null) {
throw new HelenusException( throw new HelenusException(
"userType not exists " + entity.getName() + "for entity " + entity.getMappingInterface()); "userType not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
} }
List<SchemaStatement> list = SchemaUtil.alterUserType(userType, entity, dropUnusedColumns); List<SchemaStatement> list = SchemaUtil.alterUserType(userType, entity, dropUnusedColumns);
if (!list.isEmpty()) { if (!list.isEmpty()) {
throw new HelenusException( throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list); "schema changed for entity "
} + entity.getMappingInterface()
} + ", apply this command: "
+ list);
}
}
public void updateUserType(UserType userType, HelenusEntity entity) { public void updateUserType(UserType userType, HelenusEntity entity) {
if (userType == null) { if (userType == null) {
createUserType(entity); createUserType(entity);
return; return;
} }
executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns)); executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns));
} }
private void executeBatch(List<SchemaStatement> list) { private void executeBatch(List<SchemaStatement> list) {
list.forEach(s -> { list.forEach(
sessionOps.execute(s, true); s -> {
}); sessionOps.execute(s, true);
} });
}
} }
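
Usage sketch, not part of this commit: how UserTypeOperations might be driven when bringing a keyspace up to date. The migrate helper and its parameters are assumptions; only createUserType/updateUserType come from the class above.

// Hypothetical migration helper: create the UDT when absent, otherwise alter it in place.
import com.datastax.driver.core.UserType;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UserTypeOperations;
import net.helenus.mapping.HelenusEntity;

public class UserTypeMigrationSketch {
  public static void migrate(
      AbstractSessionOperations sessionOps, UserType existing, HelenusEntity entity) {
    UserTypeOperations ops = new UserTypeOperations(sessionOps, /* dropUnusedColumns */ false);
    if (existing == null) {
      ops.createUserType(entity);           // no UDT in the schema yet: CREATE TYPE
    } else {
      ops.updateUserType(existing, entity); // ALTER TYPE for any missing columns
    }
  }
}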

View file

@ -22,5 +22,4 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE) @Target(ElementType.TYPE)
public @interface Cacheable { public @interface Cacheable {}
}
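
Usage sketch, not part of this commit: @Cacheable is applied to a mapped entity interface to mark it eligible for the session cache. The Account interface and the @Table/@PartitionKey mapping annotations shown are assumptions about a typical entity definition.

import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
@Cacheable // hypothetical entity marked as session-cacheable
public interface Account {
  @PartitionKey
  String id();

  String name();
}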

View file

@ -21,14 +21,15 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.ConflictingUnitOfWorkException; import net.helenus.core.ConflictingUnitOfWorkException;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD) @Target(ElementType.METHOD)
public @interface Retry { public @interface Retry {
Class<? extends Exception>[] on() default {ConflictingUnitOfWorkException.class, TimeoutException.class}; Class<? extends Exception>[] on() default {
ConflictingUnitOfWorkException.class, TimeoutException.class
};
int times() default 3; int times() default 3;
} }
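
Usage sketch, not part of this commit: the defaults above retry three times on ConflictingUnitOfWorkException or TimeoutException, and either member can be overridden at the call site. The service class below is hypothetical.

public class TransferService {
  // Retry only on unit-of-work conflicts, and allow up to five attempts.
  @Retry(on = {ConflictingUnitOfWorkException.class}, times = 5)
  public void transfer(String fromAccount, String toAccount, long cents) {
    // body elided: open a unit of work, apply both updates, commit
  }
}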

View file

@ -18,7 +18,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Arrays; import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Around;
@ -29,71 +29,69 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert; import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect @Aspect
public class RetryAspect { public class RetryAspect {
private static final Logger log = LoggerFactory.getLogger(RetryAspect.class); private static final Logger log = LoggerFactory.getLogger(RetryAspect.class);
@Around("@annotation(net.helenus.core.annotations.Retry)") @Around("@annotation(net.helenus.core.annotations.Retry)")
public Object retry(ProceedingJoinPoint pjp) throws Throwable { public Object retry(ProceedingJoinPoint pjp) throws Throwable {
Retry retryAnnotation = getRetryAnnotation(pjp); Retry retryAnnotation = getRetryAnnotation(pjp);
return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp); return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
} }
private Object proceed(ProceedingJoinPoint pjp) throws Throwable { private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
return pjp.proceed(); return pjp.proceed();
} }
private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable { private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
int times = retryAnnotation.times(); int times = retryAnnotation.times();
Class<? extends Throwable>[] retryOn = retryAnnotation.on(); Class<? extends Throwable>[] retryOn = retryAnnotation.on();
Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!"); Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!"); Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn)); log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) private Object tryProceeding(
throws Throwable { ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try { try {
return proceed(pjp); return proceed(pjp);
} catch (Throwable throwable) { } catch (Throwable throwable) {
if (isRetryThrowable(throwable, retryOn) && times-- > 0) { if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn)); log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
throw throwable; throw throwable;
} }
} }
private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) { private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
Throwable[] causes = ExceptionUtils.getThrowables(throwable); Throwable[] causes = ExceptionUtils.getThrowables(throwable);
for (Throwable cause : causes) { for (Throwable cause : causes) {
for (Class<? extends Throwable> retryThrowable : retryOn) { for (Class<? extends Throwable> retryThrowable : retryOn) {
if (retryThrowable.isAssignableFrom(cause.getClass())) { if (retryThrowable.isAssignableFrom(cause.getClass())) {
return true; return true;
} }
} }
} }
return false; return false;
} }
private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException { private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
MethodSignature signature = (MethodSignature) pjp.getSignature(); MethodSignature signature = (MethodSignature) pjp.getSignature();
Method method = signature.getMethod(); Method method = signature.getMethod();
Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class); Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
if (retryAnnotation != null) { if (retryAnnotation != null) {
return retryAnnotation; return retryAnnotation;
} }
Class<?>[] argClasses = new Class[pjp.getArgs().length]; Class<?>[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) { for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass(); argClasses[i] = pjp.getArgs()[i].getClass();
} }
method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses); method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
return AnnotationUtils.findAnnotation(method, Retry.class); return AnnotationUtils.findAnnotation(method, Retry.class);
} }
} }
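
Note that the pointcut above appears to name net.helenus.core.annotations.Retry while the annotation itself lives in net.helenus.core.annotation (singular); if that mismatch is real, the advice never fires. Configuration sketch, not part of this commit: the aspect only runs once it is registered with Spring's AspectJ auto-proxying; the configuration class below is an assumption.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;

@Configuration
@EnableAspectJAutoProxy
public class RetryAspectConfig {
  @Bean
  public RetryAspect retryAspect() {
    return new RetryAspect();
  }
}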

View file

@ -2,7 +2,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Arrays; import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Around;
@ -13,71 +13,69 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert; import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect @Aspect
public class RetryConcurrentUnitOfWorkAspect { public class RetryConcurrentUnitOfWorkAspect {
private static final Logger log = LoggerFactory.getLogger(RetryConcurrentUnitOfWorkAspect.class); private static final Logger log = LoggerFactory.getLogger(RetryConcurrentUnitOfWorkAspect.class);
@Around("@annotation(net.helenus.core.annotations.Retry)") @Around("@annotation(net.helenus.core.annotations.Retry)")
public Object retry(ProceedingJoinPoint pjp) throws Throwable { public Object retry(ProceedingJoinPoint pjp) throws Throwable {
Retry retryAnnotation = getRetryAnnotation(pjp); Retry retryAnnotation = getRetryAnnotation(pjp);
return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp); return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
} }
private Object proceed(ProceedingJoinPoint pjp) throws Throwable { private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
return pjp.proceed(); return pjp.proceed();
} }
private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable { private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
int times = retryAnnotation.times(); int times = retryAnnotation.times();
Class<? extends Throwable>[] retryOn = retryAnnotation.on(); Class<? extends Throwable>[] retryOn = retryAnnotation.on();
Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!"); Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!"); Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn)); log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) private Object tryProceeding(
throws Throwable { ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try { try {
return proceed(pjp); return proceed(pjp);
} catch (Throwable throwable) { } catch (Throwable throwable) {
if (isRetryThrowable(throwable, retryOn) && times-- > 0) { if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn)); log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
throw throwable; throw throwable;
} }
} }
private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) { private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
Throwable[] causes = ExceptionUtils.getThrowables(throwable); Throwable[] causes = ExceptionUtils.getThrowables(throwable);
for (Throwable cause : causes) { for (Throwable cause : causes) {
for (Class<? extends Throwable> retryThrowable : retryOn) { for (Class<? extends Throwable> retryThrowable : retryOn) {
if (retryThrowable.isAssignableFrom(cause.getClass())) { if (retryThrowable.isAssignableFrom(cause.getClass())) {
return true; return true;
} }
} }
} }
return false; return false;
} }
private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException { private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
MethodSignature signature = (MethodSignature) pjp.getSignature(); MethodSignature signature = (MethodSignature) pjp.getSignature();
Method method = signature.getMethod(); Method method = signature.getMethod();
Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class); Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
if (retryAnnotation != null) { if (retryAnnotation != null) {
return retryAnnotation; return retryAnnotation;
} }
Class[] argClasses = new Class[pjp.getArgs().length]; Class[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) { for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass(); argClasses[i] = pjp.getArgs()[i].getClass();
} }
method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses); method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
return AnnotationUtils.findAnnotation(method, Retry.class); return AnnotationUtils.findAnnotation(method, Retry.class);
} }
} }

View file

@ -18,28 +18,37 @@ package net.helenus.core.cache;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public class BoundFacet extends Facet<String> { public class BoundFacet extends Facet<String> {
private final Map<HelenusProperty, Object> properties; private final Map<HelenusProperty, Object> properties;
public BoundFacet(HelenusProperty property, Object value) { public BoundFacet(HelenusProperty property, Object value) {
super(property.getPropertyName(), value == null ? null : value.toString()); super(property.getPropertyName(), value == null ? null : value.toString());
this.properties = new HashMap<HelenusProperty, Object>(1); this.properties = new HashMap<HelenusProperty, Object>(1);
this.properties.put(property, value); this.properties.put(property, value);
} }
public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name,
(properties.keySet().size() > 1)
? "[" + String.join(", ",
properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))
+ "]"
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet())));
this.properties = properties;
}
public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(
name,
(properties.keySet().size() > 1)
? "["
+ String.join(
", ",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))
+ "]"
: String.join(
"",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet())));
this.properties = properties;
}
} }
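
Standalone sketch, not part of this commit: the value-formatting rule used by the map constructor above, in isolation. A single bound value is used verbatim; two or more are joined with ", " inside brackets (a Set is used, so ordering is not guaranteed).

import java.util.*;
import java.util.stream.Collectors;

public class FacetValueFormatSketch {
  static String format(Collection<Object> boundValues) {
    Set<String> parts =
        boundValues.stream().map(Object::toString).collect(Collectors.toSet());
    return (parts.size() > 1) ? "[" + String.join(", ", parts) + "]" : String.join("", parts);
  }

  public static void main(String[] args) {
    System.out.println(format(Collections.singletonList(42))); // 42
    System.out.println(format(Arrays.asList("Ann", "Lee")));   // e.g. [Ann, Lee]
  }
}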

View file

@ -6,59 +6,68 @@ import java.util.stream.Collectors;
public class CacheUtil { public class CacheUtil {
public static List<String[]> combinations(List<String> items) { public static List<String[]> combinations(List<String> items) {
int n = items.size(); int n = items.size();
if (n > 20 || n < 0) if (n > 20) throw new IllegalArgumentException(n + " is out of range");
throw new IllegalArgumentException(n + " is out of range"); long e = Math.round(Math.pow(2, n));
long e = Math.round(Math.pow(2, n)); List<String[]> out = new ArrayList<String[]>((int) e - 1);
List<String[]> out = new ArrayList<String[]>((int) e - 1); for (int k = 1; k <= items.size(); k++) {
for (int k = 1; k <= items.size(); k++) { kCombinations(items, 0, k, new String[k], out);
kCombinations(items, 0, k, new String[k], out); }
} return out;
return out; }
}
private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) { private static void kCombinations(
if (k == 0) { List<String> items, int n, int k, String[] arr, List<String[]> out) {
out.add(arr.clone()); if (k == 0) {
} else { out.add(arr.clone());
for (int i = n; i <= items.size() - k; i++) { } else {
arr[arr.length - k] = items.get(i); for (int i = n; i <= items.size() - k; i++) {
kCombinations(items, i + 1, k - 1, arr, out); arr[arr.length - k] = items.get(i);
} kCombinations(items, i + 1, k - 1, arr, out);
} }
} }
}
public static List<String[]> flattenFacets(List<Facet> facets) { public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = CacheUtil.combinations( List<String[]> combinations =
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> { CacheUtil.combinations(
return facet.name() + "==" + facet.value(); facets
}).collect(Collectors.toList())); .stream()
return combinations; .filter(facet -> !facet.fixed())
} .filter(facet -> facet.value() != null)
.map(
facet -> {
return facet.name() + "==" + facet.value();
})
.collect(Collectors.toList()));
return combinations;
}
public static Object merge(Object to, Object from) { public static Object merge(Object to, Object from) {
if (to == from) { if (to == from) {
return to; return to;
} else { } else {
return from; return from;
} }
/* /*
* // TODO(gburd): take ttl and writeTime into account when merging. Map<String, * // TODO(gburd): take ttl and writeTime into account when merging. Map<String,
* Object> toValueMap = to instanceof MapExportable ? ((MapExportable) * Object> toValueMap = to instanceof MapExportable ? ((MapExportable)
* to).toMap() : null; Map<String, Object> fromValueMap = to instanceof * to).toMap() : null; Map<String, Object> fromValueMap = to instanceof
* MapExportable ? ((MapExportable) from).toMap() : null; * MapExportable ? ((MapExportable) from).toMap() : null;
* *
* if (toValueMap != null && fromValueMap != null) { for (String key : * if (toValueMap != null && fromValueMap != null) { for (String key :
* fromValueMap.keySet()) { if (toValueMap.containsKey(key) && * fromValueMap.keySet()) { if (toValueMap.containsKey(key) &&
* toValueMap.get(key) != fromValueMap.get(key)) { toValueMap.put(key, * toValueMap.get(key) != fromValueMap.get(key)) { toValueMap.put(key,
* fromValueMap.get(key)); } } } return to; * fromValueMap.get(key)); } } } return to;
*/ */
} }
public static String schemaName(List<Facet> facets) {
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
.collect(Collectors.joining("."));
}
public static String schemaName(List<Facet> facets) {
return facets
.stream()
.filter(Facet::fixed)
.map(facet -> facet.value().toString())
.collect(Collectors.joining("."));
}
} }
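
combinations() enumerates every non-empty subset of its input, so for n items it produces 2^n - 1 arrays (hence the n > 20 guard), and flattenFacets() feeds it the bound, non-fixed facets rendered as "name==value" strings. Output sketch, not part of this commit:

import java.util.Arrays;
import java.util.List;
import net.helenus.core.cache.CacheUtil;

public class CombinationsSketch {
  public static void main(String[] args) {
    List<String> facets = Arrays.asList("id==1", "name==Ann", "email==a@x");
    for (String[] combo : CacheUtil.combinations(facets)) {
      System.out.println(Arrays.toString(combo));
    }
    // 2^3 - 1 = 7 lines: the three singletons, then the three pairs,
    // then [id==1, name==Ann, email==a@x].
  }
}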

View file

@ -16,38 +16,35 @@
package net.helenus.core.cache; package net.helenus.core.cache;
/** /** An Entity is identifiable via one or more Facets */
* An Entity is identifiable via one or more Facets
*/
public class Facet<T> { public class Facet<T> {
private final String name; private final String name;
private T value; private T value;
private boolean fixed = false; private boolean fixed = false;
public Facet(String name) { public Facet(String name) {
this.name = name; this.name = name;
} }
public Facet(String name, T value) { public Facet(String name, T value) {
this.name = name; this.name = name;
this.value = value; this.value = value;
} }
public String name() { public String name() {
return name; return name;
} }
public T value() { public T value() {
return value; return value;
} }
public Facet setFixed() { public Facet setFixed() {
fixed = true; fixed = true;
return this; return this;
} }
public boolean fixed() {
return fixed;
}
public boolean fixed() {
return fixed;
}
} }

View file

@ -20,25 +20,24 @@ import com.google.common.cache.Cache;
public class GuavaCache<K, V> implements SessionCache<K, V> { public class GuavaCache<K, V> implements SessionCache<K, V> {
final Cache<K, V> cache; final Cache<K, V> cache;
GuavaCache(Cache<K, V> cache) { GuavaCache(Cache<K, V> cache) {
this.cache = cache; this.cache = cache;
} }
@Override @Override
public void invalidate(K key) { public void invalidate(K key) {
cache.invalidate(key); cache.invalidate(key);
} }
@Override @Override
public V get(K key) { public V get(K key) {
return cache.getIfPresent(key); return cache.getIfPresent(key);
} }
@Override
public void put(K key, V value) {
cache.put(key, value);
}
@Override
public void put(K key, V value) {
cache.put(key, value);
}
} }

View file

@ -16,21 +16,45 @@
package net.helenus.core.cache; package net.helenus.core.cache;
import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public interface SessionCache<K, V> { public interface SessionCache<K, V> {
static <K, V> SessionCache<K, V> defaultCache() { static final Logger LOG = LoggerFactory.getLogger(SessionCache.class);
int MAX_CACHE_SIZE = 10000;
int MAX_CACHE_EXPIRE_SECONDS = 600;
return new GuavaCache<K, V>(CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS)
.expireAfterWrite(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build());
}
void invalidate(K key); static <K, V> SessionCache<K, V> defaultCache() {
V get(K key); GuavaCache<K, V> cache;
void put(K key, V value); RemovalListener<K, V> listener =
new RemovalListener<K, V>() {
@Override
public void onRemoval(RemovalNotification<K, V> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
LOG.info(cause);
}
}
};
cache =
new GuavaCache<K, V>(
CacheBuilder.newBuilder()
.maximumSize(25_000)
.expireAfterAccess(5, TimeUnit.MINUTES)
.softValues()
.removalListener(listener)
.build());
return cache;
}
void invalidate(K key);
V get(K key);
void put(K key, V value);
} }
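
The default above is a Guava cache holding up to 25,000 entries with a five-minute access expiry, soft values, and an eviction-logging removal listener. Sketch, not part of this commit, of supplying a differently tuned cache through the same wrapper; it assumes the class sits in net.helenus.core.cache so the package-private GuavaCache constructor is visible.

package net.helenus.core.cache;

import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

public class SmallSessionCacheSketch {
  // Hypothetical alternative: fewer entries, expiring a fixed time after write.
  public static <K, V> SessionCache<K, V> smallCache() {
    return new GuavaCache<K, V>(
        CacheBuilder.newBuilder()
            .maximumSize(1_000)
            .expireAfterWrite(60, TimeUnit.SECONDS)
            .build());
  }
}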

View file

@ -19,56 +19,55 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.SchemaUtil; import net.helenus.core.SchemaUtil;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public class UnboundFacet extends Facet<String> { public class UnboundFacet extends Facet<String> {
private final List<HelenusProperty> properties; private final List<HelenusProperty> properties;
public UnboundFacet(List<HelenusProperty> properties) { public UnboundFacet(List<HelenusProperty> properties) {
super(SchemaUtil.createPrimaryKeyPhrase(properties)); super(SchemaUtil.createPrimaryKeyPhrase(properties));
this.properties = properties; this.properties = properties;
} }
public UnboundFacet(HelenusProperty property) { public UnboundFacet(HelenusProperty property) {
super(property.getPropertyName()); super(property.getPropertyName());
properties = new ArrayList<HelenusProperty>(); properties = new ArrayList<HelenusProperty>();
properties.add(property); properties.add(property);
} }
public List<HelenusProperty> getProperties() { public List<HelenusProperty> getProperties() {
return properties; return properties;
} }
public Binder binder() { public Binder binder() {
return new Binder(name(), properties); return new Binder(name(), properties);
} }
public static class Binder { public static class Binder {
private final String name; private final String name;
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>(); private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>(); private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
Binder(String name, List<HelenusProperty> properties) { Binder(String name, List<HelenusProperty> properties) {
this.name = name; this.name = name;
this.properties.addAll(properties); this.properties.addAll(properties);
} }
public Binder setValueForProperty(HelenusProperty prop, Object value) { public Binder setValueForProperty(HelenusProperty prop, Object value) {
properties.remove(prop); properties.remove(prop);
boundProperties.put(prop, value); boundProperties.put(prop, value);
return this; return this;
} }
public boolean isBound() { public boolean isBound() {
return properties.isEmpty(); return properties.isEmpty();
} }
public BoundFacet bind() { public BoundFacet bind() {
return new BoundFacet(name, boundProperties); return new BoundFacet(name, boundProperties);
} }
} }
} }
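
Binding sketch, not part of this commit: the cycle is unbound facet -> binder -> setValueForProperty for each key property -> bind() once isBound() is true. The idProperty handle would come from the entity's mapping metadata; here it is simply a parameter.

import net.helenus.core.cache.BoundFacet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.HelenusProperty;

public class FacetBindingSketch {
  public static BoundFacet bindId(HelenusProperty idProperty, Object idValue) {
    UnboundFacet.Binder binder = new UnboundFacet(idProperty).binder();
    binder.setValueForProperty(idProperty, idValue.toString());
    return binder.isBound() ? binder.bind() : null; // null only if a property is still unbound
  }
}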

View file

@ -16,132 +16,129 @@
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*; import java.util.*;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>> public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends extends AbstractOperation<E, O> {
AbstractOperation<E, O> {
protected List<Filter<?>> filters = null; protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
public AbstractFilterOperation(AbstractSessionOperations sessionOperations) { public AbstractFilterOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
} }
public <V> O where(Getter<V> getter, Postulate<V> postulate) { public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O where(Getter<V> getter, Operator operator, V val) { public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O where(Filter<V> filter) { public <V> O where(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Postulate<V> postulate) { public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Operator operator, V val) { public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O and(Filter<V> filter) { public <V> O and(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) { public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate)); addIfFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) { public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val)); addIfFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Filter<V> filter) { public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter); addIfFilter(filter);
return (O) this; return (O) this;
} }
private void addFilter(Filter<?> filter) { private void addFilter(Filter<?> filter) {
if (filters == null) { if (filters == null) {
filters = new LinkedList<Filter<?>>(); filters = new LinkedList<Filter<?>>();
} }
filters.add(filter); filters.add(filter);
} }
private void addIfFilter(Filter<?> filter) { private void addIfFilter(Filter<?> filter) {
if (ifFilters == null) { if (ifFilters == null) {
ifFilters = new LinkedList<Filter<?>>(); ifFilters = new LinkedList<Filter<?>>();
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
protected List<Facet> bindFacetValues(List<Facet> facets) { protected List<Facet> bindFacetValues(List<Facet> facets) {
if (facets == null) { if (facets == null) {
return new ArrayList<Facet>(); return new ArrayList<Facet>();
} }
List<Facet> boundFacets = new ArrayList<>(); List<Facet> boundFacets = new ArrayList<>();
Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size()); Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size());
filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f)); filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f));
for (Facet facet : facets) { for (Facet facet : facets) {
if (facet instanceof UnboundFacet) { if (facet instanceof UnboundFacet) {
UnboundFacet unboundFacet = (UnboundFacet) facet; UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder(); UnboundFacet.Binder binder = unboundFacet.binder();
if (filters != null) { if (filters != null) {
for (HelenusProperty prop : unboundFacet.getProperties()) { for (HelenusProperty prop : unboundFacet.getProperties()) {
Filter filter = filterMap.get(prop); Filter filter = filterMap.get(prop);
if (filter != null) { if (filter != null) {
Object[] postulates = filter.postulateValues(); Object[] postulates = filter.postulateValues();
for (Object p : postulates) { for (Object p : postulates) {
binder.setValueForProperty(prop, p.toString()); binder.setValueForProperty(prop, p.toString());
} }
} }
} }
}
} if (binder.isBound()) {
if (binder.isBound()) { boundFacets.add(binder.bind());
boundFacets.add(binder.bind()); }
} } else {
} else { boundFacets.add(facet);
boundFacets.add(facet); }
} }
} return boundFacets;
return boundFacets; }
}
} }

View file

@ -19,95 +19,94 @@ import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>> public abstract class AbstractFilterOptionalOperation<
extends E, O extends AbstractFilterOptionalOperation<E, O>>
AbstractOptionalOperation<E, O> { extends AbstractOptionalOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null; protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
public AbstractFilterOptionalOperation(AbstractSessionOperations sessionOperations) { public AbstractFilterOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
} }
public <V> O where(Getter<V> getter, Postulate<V> postulate) { public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O where(Getter<V> getter, Operator operator, V val) { public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O where(Filter<V> filter) { public <V> O where(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Postulate<V> postulate) { public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Operator operator, V val) { public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O and(Filter<V> filter) { public <V> O and(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) { public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate)); addIfFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) { public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val)); addIfFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Filter<V> filter) { public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter); addIfFilter(filter);
return (O) this; return (O) this;
} }
private void addFilter(Filter<?> filter) { private void addFilter(Filter<?> filter) {
if (filters == null) { if (filters == null) {
filters = new LinkedHashMap<HelenusProperty, Filter<?>>(); filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
} }
filters.put(filter.getNode().getProperty(), filter); filters.put(filter.getNode().getProperty(), filter);
} }
private void addIfFilter(Filter<?> filter) { private void addIfFilter(Filter<?> filter) {
if (ifFilters == null) { if (ifFilters == null) {
ifFilters = new LinkedList<Filter<?>>(); ifFilters = new LinkedList<Filter<?>>();
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
} }

View file

@ -19,95 +19,94 @@ import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>> public abstract class AbstractFilterStreamOperation<
extends E, O extends AbstractFilterStreamOperation<E, O>>
AbstractStreamOperation<E, O> { extends AbstractStreamOperation<E, O> {
protected Map<HelenusProperty, Filter<?>> filters = null; protected Map<HelenusProperty, Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
public AbstractFilterStreamOperation(AbstractSessionOperations sessionOperations) { public AbstractFilterStreamOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
} }
public <V> O where(Getter<V> getter, Postulate<V> postulate) { public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O where(Getter<V> getter, Operator operator, V val) { public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O where(Filter<V> filter) { public <V> O where(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Postulate<V> postulate) { public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate)); addFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O and(Getter<V> getter, Operator operator, V val) { public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O and(Filter<V> filter) { public <V> O and(Filter<V> filter) {
addFilter(filter); addFilter(filter);
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) { public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate)); addIfFilter(Filter.create(getter, postulate));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) { public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val)); addIfFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
public <V> O onlyIf(Filter<V> filter) { public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter); addIfFilter(filter);
return (O) this; return (O) this;
} }
private void addFilter(Filter<?> filter) { private void addFilter(Filter<?> filter) {
if (filters == null) { if (filters == null) {
filters = new LinkedHashMap<HelenusProperty, Filter<?>>(); filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
} }
filters.put(filter.getNode().getProperty(), filter); filters.put(filter.getNode().getProperty(), filter);
} }
private void addIfFilter(Filter<?> filter) { private void addIfFilter(Filter<?> filter) {
if (ifFilters == null) { if (ifFilters == null) {
ifFilters = new LinkedList<Filter<?>>(); ifFilters = new LinkedList<Filter<?>>();
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
} }

View file

@ -15,73 +15,86 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> { public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
extends AbstractStatementOperation<E, O> {
public AbstractOperation(AbstractSessionOperations sessionOperations) { public AbstractOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
} }
public abstract E transform(ResultSet resultSet); public abstract E transform(ResultSet resultSet);
public PreparedOperation<E> prepare() { public PreparedOperation<E> prepare() {
return new PreparedOperation<E>(prepareStatement(), this); return new PreparedOperation<E>(prepareStatement(), this);
} }
public E sync() throws TimeoutException { public E sync() throws TimeoutException {
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, false); this.execute(
return transform(resultSet); sessionOps,
} finally { null,
context.stop(); traceContext,
} queryExecutionTimeout,
} queryTimeoutUnits,
showValues,
false);
return transform(resultSet);
} finally {
context.stop();
}
}
public E sync(UnitOfWork uow) throws TimeoutException { public E sync(UnitOfWork uow) throws TimeoutException {
if (uow == null) if (uow == null) return sync();
return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, true); execute(
E result = transform(resultSet); sessionOps,
return result; uow,
} finally { traceContext,
context.stop(); queryExecutionTimeout,
} queryTimeoutUnits,
} showValues,
true);
E result = transform(resultSet);
return result;
} finally {
context.stop();
}
}
public CompletableFuture<E> async() { public CompletableFuture<E> async() {
return CompletableFuture.<E>supplyAsync(() -> { return CompletableFuture.<E>supplyAsync(
try { () -> {
return sync(); try {
} catch (TimeoutException ex) { return sync();
throw new CompletionException(ex); } catch (TimeoutException ex) {
} throw new CompletionException(ex);
}); }
} });
}
public CompletableFuture<E> async(UnitOfWork uow) { public CompletableFuture<E> async(UnitOfWork uow) {
if (uow == null) if (uow == null) return async();
return async(); return CompletableFuture.<E>supplyAsync(
return CompletableFuture.<E>supplyAsync(() -> { () -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}); });
} }
} }
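
The async() variants above wrap any TimeoutException from sync() in a CompletionException, so callers that join() the future must unwrap the cause. Standalone sketch, not part of this commit, of that unwrapping with plain JDK futures:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;

public class AsyncUnwrapSketch {
  public static void main(String[] args) {
    CompletableFuture<String> future =
        CompletableFuture.supplyAsync(
            () -> {
              // Stands in for sync() timing out inside async().
              throw new CompletionException(new TimeoutException("query timed out"));
            });
    try {
      future.join();
    } catch (CompletionException e) {
      System.out.println("cause: " + e.getCause()); // the original TimeoutException
    }
  }
}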

View file

@ -17,12 +17,6 @@ package net.helenus.core.operation;
import static net.helenus.core.HelenusSession.deleted; import static net.helenus.core.HelenusSession.deleted;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
@ -30,180 +24,198 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>> public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends extends AbstractStatementOperation<E, O> {
AbstractStatementOperation<E, O> {
public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) { public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
} }
public abstract Optional<E> transform(ResultSet resultSet); public abstract Optional<E> transform(ResultSet resultSet);
public PreparedOptionalOperation<E> prepare() { public PreparedOptionalOperation<E> prepare() {
return new PreparedOptionalOperation<E>(prepareStatement(), this); return new PreparedOptionalOperation<E>(prepareStatement(), this);
} }
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() { public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
final O _this = (O) this; final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), return Futures.transform(
new Function<PreparedStatement, PreparedOptionalOperation<E>>() { prepareStatementAsync(),
@Override new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) { @Override
return new PreparedOptionalOperation<E>(preparedStatement, _this); public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
} return new PreparedOptionalOperation<E>(preparedStatement, _this);
}); }
} });
}
public Optional<E> sync() throws TimeoutException { public Optional<E> sync() throws TimeoutException {
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
Optional<E> result = Optional.empty(); Optional<E> result = Optional.empty();
E cacheResult = null; E cacheResult = null;
boolean updateCache = isSessionCacheable() && checkCache; boolean updateCache = isSessionCacheable() && checkCache;
if (checkCache && isSessionCacheable()) { if (checkCache && isSessionCacheable()) {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets); cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) { if (cacheResult != null) {
result = Optional.of(cacheResult); result = Optional.of(cacheResult);
updateCache = false; updateCache = false;
sessionCacheHits.mark(); sessionCacheHits.mark();
cacheHits.mark(); cacheHits.mark();
} else { } else {
sessionCacheMiss.mark(); sessionCacheMiss.mark();
cacheMiss.mark(); cacheMiss.mark();
} }
} }
if (!result.isPresent()) { if (!result.isPresent()) {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, ResultSet resultSet =
queryTimeoutUnits, showValues, false); this.execute(
sessionOps,
null,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
false);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
} }
if (updateCache && result.isPresent()) { if (updateCache && result.isPresent()) {
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();
if (facets != null && facets.size() > 1) { if (facets != null && facets.size() > 1) {
sessionOps.updateCache(result.get(), facets); sessionOps.updateCache(result.get(), facets);
} }
} }
return result; return result;
} finally { } finally {
context.stop(); context.stop();
} }
} }
public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException { public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException {
if (uow == null) if (uow == null) return sync();
return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
Optional<E> result = Optional.empty(); Optional<E> result = Optional.empty();
E cachedResult = null; E cachedResult = null;
final boolean updateCache; final boolean updateCache;
if (checkCache) { if (checkCache) {
Stopwatch timer = Stopwatch.createStarted(); Stopwatch timer = Stopwatch.createStarted();
try { try {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
if (facets != null) { if (facets != null) {
cachedResult = checkCache(uow, facets); cachedResult = checkCache(uow, facets);
if (cachedResult != null) { if (cachedResult != null) {
updateCache = false; updateCache = false;
result = Optional.of(cachedResult); result = Optional.of(cachedResult);
uowCacheHits.mark(); uowCacheHits.mark();
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
updateCache = true; updateCache = true;
uowCacheMiss.mark(); uowCacheMiss.mark();
if (isSessionCacheable()) { if (isSessionCacheable()) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cachedResult = (E) sessionOps.checkCache(tableName, facets); cachedResult = (E) sessionOps.checkCache(tableName, facets);
if (cachedResult != null) { if (cachedResult != null) {
result = Optional.of(cachedResult); result = Optional.of(cachedResult);
sessionCacheHits.mark(); sessionCacheHits.mark();
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
sessionCacheMiss.mark(); sessionCacheMiss.mark();
cacheMiss.mark(); cacheMiss.mark();
uow.recordCacheAndDatabaseOperationCount(-1, 0); uow.recordCacheAndDatabaseOperationCount(-1, 0);
} }
} }
} }
} else { } else {
updateCache = false; updateCache = false;
} }
} finally { } finally {
timer.stop(); timer.stop();
uow.addCacheLookupTime(timer); uow.addCacheLookupTime(timer);
} }
} else { } else {
updateCache = false; updateCache = false;
} }
// Check to see if we fetched the object from the cache // Check to see if we fetched the object from the cache
if (result.isPresent()) { if (result.isPresent()) {
// If we fetched the `deleted` object then the result is null (really // If we fetched the `deleted` object then the result is null (really
// Optional.empty()). // Optional.empty()).
if (result.get() == deleted) { if (result.get() == deleted) {
result = Optional.empty(); result = Optional.empty();
} }
} else { } else {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits, ResultSet resultSet =
showValues, true); execute(
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
} }
// If we have a result, it wasn't from the UOW cache, and we're caching things // If we have a result, it wasn't from the UOW cache, and we're caching things
// then we need to put this result into the cache for future requests to find. // then we need to put this result into the cache for future requests to find.
if (updateCache && result.isPresent() && result.get() != deleted) { if (updateCache && result.isPresent() && result.get() != deleted) {
cacheUpdate(uow, result.get(), getFacets()); cacheUpdate(uow, result.get(), getFacets());
} }
return result; return result;
} finally { } finally {
context.stop(); context.stop();
} }
} }
public CompletableFuture<Optional<E>> async() { public CompletableFuture<Optional<E>> async() {
return CompletableFuture.<Optional<E>>supplyAsync(() -> { return CompletableFuture.<Optional<E>>supplyAsync(
try { () -> {
return sync(); try {
} catch (TimeoutException ex) { return sync();
throw new CompletionException(ex); } catch (TimeoutException ex) {
} throw new CompletionException(ex);
}); }
} });
}
public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) { public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
if (uow == null) if (uow == null) return async();
return async(); return CompletableFuture.<Optional<E>>supplyAsync(
return CompletableFuture.<Optional<E>>supplyAsync(() -> { () -> {
try { try {
return sync(); return sync();
} catch (TimeoutException ex) { } catch (TimeoutException ex) {
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}); });
} }
} }
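
Lookup-order sketch, not part of this commit: sync(UnitOfWork) consults the unit-of-work cache, then (for session-cacheable entities) the session cache, and only then executes the statement, with the deleted sentinel short-circuiting to Optional.empty(). The maps and supplier below stand in for those three sources; none of this is the real implementation.

import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;

public class OptionalLookupOrderSketch {
  static final Object DELETED = new Object(); // stand-in for HelenusSession.deleted

  static Optional<Object> lookup(
      Map<String, Object> uowCache,
      Map<String, Object> sessionCache,
      String key,
      Supplier<Object> fetchFromDatabase) {
    Object hit = uowCache.getOrDefault(key, sessionCache.get(key));
    if (hit == DELETED) {
      return Optional.empty(); // deleted earlier in this unit of work
    }
    if (hit != null) {
      return Optional.of(hit); // cache hit: no database round trip
    }
    Object fetched = fetchFromDatabase.get(); // cache miss: run the query
    if (fetched != null) {
      uowCache.put(key, fetched); // make it visible to later lookups in this UOW
    }
    return Optional.ofNullable(fetched);
  }
}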

View file

@ -15,12 +15,8 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.ArrayList; import brave.Tracer;
import java.util.List; import brave.propagation.TraceContext;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement; import com.datastax.driver.core.RegularStatement;
@ -31,9 +27,11 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy; import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.ArrayList;
import brave.Tracer; import java.util.List;
import brave.propagation.TraceContext; import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
@ -43,323 +41,324 @@ import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.BeanColumnValueProvider; import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> { public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
extends Operation<E> {
protected boolean checkCache = true;
protected boolean showValues = true; protected boolean checkCache = true;
protected TraceContext traceContext; protected boolean showValues = true;
long queryExecutionTimeout = 10; protected TraceContext traceContext;
TimeUnit queryTimeoutUnits = TimeUnit.SECONDS; long queryExecutionTimeout = 10;
private ConsistencyLevel consistencyLevel; TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
private ConsistencyLevel serialConsistencyLevel; private ConsistencyLevel consistencyLevel;
private RetryPolicy retryPolicy; private ConsistencyLevel serialConsistencyLevel;
private boolean idempotent = false; private RetryPolicy retryPolicy;
private boolean enableTracing = false; private boolean idempotent = false;
private long[] defaultTimestamp = null; private boolean enableTracing = false;
private int[] fetchSize = null; private long[] defaultTimestamp = null;
private int[] fetchSize = null;
public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
  public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
    this.consistencyLevel = sessionOperations.getDefaultConsistencyLevel();
    this.idempotent = sessionOperations.getDefaultQueryIdempotency();
  }

  public abstract Statement buildStatement(boolean cached);

  public O uncached(boolean enabled) {
    checkCache = enabled;
    return (O) this;
  }

  public O uncached() {
    checkCache = false;
    return (O) this;
  }

  public O showValues(boolean enabled) {
    this.showValues = enabled;
    return (O) this;
  }

  public O defaultTimestamp(long timestamp) {
    this.defaultTimestamp = new long[1];
    this.defaultTimestamp[0] = timestamp;
    return (O) this;
  }

  public O retryPolicy(RetryPolicy retryPolicy) {
    this.retryPolicy = retryPolicy;
    return (O) this;
  }

  public O defaultRetryPolicy() {
    this.retryPolicy = DefaultRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O idempotent() {
    this.idempotent = true;
    return (O) this;
  }

  public O isIdempotent(boolean idempotent) {
    this.idempotent = idempotent;
    return (O) this;
  }

  public O downgradingConsistencyRetryPolicy() {
    this.retryPolicy = DowngradingConsistencyRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O fallthroughRetryPolicy() {
    this.retryPolicy = FallthroughRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O consistency(ConsistencyLevel level) {
    this.consistencyLevel = level;
    return (O) this;
  }

  public O consistencyAny() {
    this.consistencyLevel = ConsistencyLevel.ANY;
    return (O) this;
  }

  public O consistencyOne() {
    this.consistencyLevel = ConsistencyLevel.ONE;
    return (O) this;
  }

  public O consistencyQuorum() {
    this.consistencyLevel = ConsistencyLevel.QUORUM;
    return (O) this;
  }

  public O consistencyAll() {
    this.consistencyLevel = ConsistencyLevel.ALL;
    return (O) this;
  }

  public O consistencyLocalOne() {
    this.consistencyLevel = ConsistencyLevel.LOCAL_ONE;
    return (O) this;
  }

  public O consistencyLocalQuorum() {
    this.consistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
    return (O) this;
  }

  public O consistencyEachQuorum() {
    this.consistencyLevel = ConsistencyLevel.EACH_QUORUM;
    return (O) this;
  }

  public O serialConsistency(ConsistencyLevel level) {
    this.serialConsistencyLevel = level;
    return (O) this;
  }

  public O serialConsistencyAny() {
    this.serialConsistencyLevel = ConsistencyLevel.ANY;
    return (O) this;
  }

  public O serialConsistencyOne() {
    this.serialConsistencyLevel = ConsistencyLevel.ONE;
    return (O) this;
  }

  public O serialConsistencyQuorum() {
    this.serialConsistencyLevel = ConsistencyLevel.QUORUM;
    return (O) this;
  }

  public O serialConsistencyAll() {
    this.serialConsistencyLevel = ConsistencyLevel.ALL;
    return (O) this;
  }

  public O serialConsistencyLocal() {
    this.serialConsistencyLevel = ConsistencyLevel.LOCAL_SERIAL;
    return (O) this;
  }

  public O serialConsistencyLocalQuorum() {
    this.serialConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
    return (O) this;
  }

  public O disableTracing() {
    this.enableTracing = false;
    return (O) this;
  }

  public O enableTracing() {
    this.enableTracing = true;
    return (O) this;
  }

  public O tracing(boolean enable) {
    this.enableTracing = enable;
    return (O) this;
  }

  public O fetchSize(int fetchSize) {
    this.fetchSize = new int[1];
    this.fetchSize[0] = fetchSize;
    return (O) this;
  }

  public O queryTimeoutMs(long ms) {
    this.queryExecutionTimeout = ms;
    this.queryTimeoutUnits = TimeUnit.MILLISECONDS;
    return (O) this;
  }

  public O queryTimeout(long timeout, TimeUnit units) {
    this.queryExecutionTimeout = timeout;
    this.queryTimeoutUnits = units;
    return (O) this;
  }

  public Statement options(Statement statement) {
    if (defaultTimestamp != null) {
      statement.setDefaultTimestamp(defaultTimestamp[0]);
    }
    if (consistencyLevel != null) {
      statement.setConsistencyLevel(consistencyLevel);
    }
    if (serialConsistencyLevel != null) {
      statement.setSerialConsistencyLevel(serialConsistencyLevel);
    }
    if (retryPolicy != null) {
      statement.setRetryPolicy(retryPolicy);
    }
    if (enableTracing) {
      statement.enableTracing();
    } else {
      statement.disableTracing();
    }
    if (fetchSize != null) {
      statement.setFetchSize(fetchSize[0]);
    }
    if (idempotent) {
      statement.setIdempotent(true);
    }
    return statement;
  }

  public O zipkinContext(TraceContext traceContext) {
    if (traceContext != null) {
      Tracer tracer = this.sessionOps.getZipkinTracer();
      if (tracer != null) {
        this.traceContext = traceContext;
      }
    }
    return (O) this;
  }

  public Statement statement() {
    return buildStatement(false);
  }

  public String cql() {
    Statement statement = buildStatement(false);
    if (statement == null) return "";
    if (statement instanceof BuiltStatement) {
      BuiltStatement buildStatement = (BuiltStatement) statement;
      return buildStatement.setForceNoValues(true).getQueryString();
    } else {
      return statement.toString();
    }
  }

  public PreparedStatement prepareStatement() {
    Statement statement = buildStatement(true);
    if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      return sessionOps.prepare(regularStatement);
    }
    throw new HelenusException("only RegularStatements can be prepared");
  }

  public ListenableFuture<PreparedStatement> prepareStatementAsync() {
    Statement statement = buildStatement(true);
    if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      return sessionOps.prepareAsync(regularStatement);
    }
    throw new HelenusException("only RegularStatements can be prepared");
  }

  protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
    E result = null;
    Optional<Object> optionalCachedResult = Optional.empty();

    if (!facets.isEmpty()) {
      optionalCachedResult = uow.cacheLookup(facets);
      if (optionalCachedResult.isPresent()) {
        result = (E) optionalCachedResult.get();
      }
    }

    return result;
  }

  protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
    List<Facet> facets = new ArrayList<>();
    Map<String, Object> valueMap =
        pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;

    for (Facet facet : identifyingFacets) {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        for (HelenusProperty prop : unboundFacet.getProperties()) {
          Object value;
          if (valueMap == null) {
            value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
            if (value != null) {
              binder.setValueForProperty(prop, value.toString());
            }
          } else {
            value = valueMap.get(prop.getPropertyName());
            if (value != null) {
              binder.setValueForProperty(prop, value.toString());
            }
          }
        }
        if (binder.isBound()) {
          facets.add(binder.bind());
        }
      } else {
        facets.add(facet);
      }
    }

    // Cache the value (pojo), the statement key, and the fully bound facets.
    uow.cacheUpdate(pojo, facets);
  }
}
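A minimal usage sketch of the fluent option setters above, assuming a HelenusSession named session and a mapped Widget entity (both names are illustrative, not taken from this diff):

    // Each setter mutates the operation and returns it, so options chain before execution.
    Stream<Widget> widgets =
        session
            .<Widget>select(Widget.class)        // assumed DSL entry point returning a stream operation
            .consistencyLocalQuorum()            // ConsistencyLevel.LOCAL_QUORUM for this statement only
            .defaultRetryPolicy()                // DefaultRetryPolicy.INSTANCE
            .idempotent()                        // marks the statement safe to retry
            .queryTimeout(5, TimeUnit.SECONDS)   // bound applied to getUninterruptibly(...)
            .sync();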

@ -17,13 +17,6 @@ package net.helenus.core.operation;
import static net.helenus.core.HelenusSession.deleted;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
@ -31,185 +24,206 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
    extends AbstractStatementOperation<E, O> {

  public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract Stream<E> transform(ResultSet resultSet);

  public PreparedStreamOperation<E> prepare() {
    return new PreparedStreamOperation<E>(prepareStatement(), this);
  }

  public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
    final O _this = (O) this;
    return Futures.transform(
        prepareStatementAsync(),
        new Function<PreparedStatement, PreparedStreamOperation<E>>() {
          @Override
          public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
            return new PreparedStreamOperation<E>(preparedStatement, _this);
          }
        });
  }

  public Stream<E> sync() throws TimeoutException {
    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cacheResult = null;
      boolean updateCache = isSessionCacheable();

      if (checkCache && isSessionCacheable()) {
        List<Facet> facets = bindFacetValues();
        String tableName = CacheUtil.schemaName(facets);
        cacheResult = (E) sessionOps.checkCache(tableName, facets);
        if (cacheResult != null) {
          resultStream = Stream.of(cacheResult);
          updateCache = false;
          sessionCacheHits.mark();
          cacheHits.mark();
        } else {
          sessionCacheMiss.mark();
          cacheMiss.mark();
        }
      }

      if (resultStream == null) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet =
            this.execute(
                sessionOps,
                null,
                traceContext,
                queryExecutionTimeout,
                queryTimeoutUnits,
                showValues,
                false);

        // Transform the query result set into the desired shape.
        resultStream = transform(resultSet);
      }

      if (updateCache && resultStream != null) {
        List<Facet> facets = getFacets();
        if (facets != null && facets.size() > 1) {
          List<E> again = new ArrayList<>();
          resultStream.forEach(
              result -> {
                sessionOps.updateCache(result, facets);
                again.add(result);
              });
          resultStream = again.stream();
        }
      }
      return resultStream;
    } finally {
      context.stop();
    }
  }

  public Stream<E> sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) return sync();

    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cachedResult = null;
      final boolean updateCache;

      if (checkCache) {
        Stopwatch timer = Stopwatch.createStarted();
        try {
          List<Facet> facets = bindFacetValues();
          if (facets != null) {
            cachedResult = checkCache(uow, facets);
            if (cachedResult != null) {
              updateCache = false;
              resultStream = Stream.of(cachedResult);
              uowCacheHits.mark();
              cacheHits.mark();
              uow.recordCacheAndDatabaseOperationCount(1, 0);
            } else {
              updateCache = true;
              uowCacheMiss.mark();
              if (isSessionCacheable()) {
                String tableName = CacheUtil.schemaName(facets);
                cachedResult = (E) sessionOps.checkCache(tableName, facets);
                if (cachedResult != null) {
                  resultStream = Stream.of(cachedResult);
                  sessionCacheHits.mark();
                  cacheHits.mark();
                  uow.recordCacheAndDatabaseOperationCount(1, 0);
                } else {
                  sessionCacheMiss.mark();
                  cacheMiss.mark();
                  uow.recordCacheAndDatabaseOperationCount(-1, 0);
                }
              }
            }
          } else {
            updateCache = false;
          }
        } finally {
          timer.stop();
          uow.addCacheLookupTime(timer);
        }
      } else {
        updateCache = false;
      }

      // Check to see if we fetched the object from the cache
      if (resultStream == null) {
        ResultSet resultSet =
            execute(
                sessionOps,
                uow,
                traceContext,
                queryExecutionTimeout,
                queryTimeoutUnits,
                showValues,
                true);
        resultStream = transform(resultSet);
      }

      // If we have a result and we're caching then we need to put it into the cache
      // for future requests to find.
      if (resultStream != null) {
        List<E> again = new ArrayList<>();
        List<Facet> facets = getFacets();
        resultStream.forEach(
            result -> {
              if (result != deleted) {
                if (updateCache) {
                  cacheUpdate(uow, result, facets);
                }
                again.add(result);
              }
            });
        resultStream = again.stream();
      }

      return resultStream;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<Stream<E>> async() {
    return CompletableFuture.<Stream<E>>supplyAsync(
        () -> {
          try {
            return sync();
          } catch (TimeoutException ex) {
            throw new CompletionException(ex);
          }
        });
  }

  public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
    if (uow == null) return async();
    return CompletableFuture.<Stream<E>>supplyAsync(
        () -> {
          try {
            return sync(uow);
          } catch (TimeoutException ex) {
            throw new CompletionException(ex);
          }
        });
  }
}
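A sketch of the unit-of-work read path implemented by sync(UnitOfWork) above; session, Widget, begin(), commit(), and abort() are assumed names used only for illustration:

    UnitOfWork uow = session.begin();
    try {
      // First read misses the UOW cache, executes against Cassandra, then cacheUpdate()s the rows.
      Stream<Widget> first = session.<Widget>select(Widget.class).sync(uow);
      // A second identical read inside the same UOW can be answered from the cached facets.
      Stream<Widget> second = session.<Widget>select(Widget.class).sync(uow);
      uow.commit();
    } catch (TimeoutException e) {
      uow.abort();
    }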

@ -21,27 +21,27 @@ import com.datastax.driver.core.Statement;
public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractOperation<E, ?> delegate;

  public BoundOperation(BoundStatement boundStatement, AbstractOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public E transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}
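A BoundOperation is normally produced by PreparedOperation.bind(...) (that class appears later in this diff); a rough sketch, where preparedWidgetOp and someId are illustrative names:

    // Bind positional parameters against the prepared statement, then execute;
    // transform() is delegated back to the original operation.
    BoundOperation<Widget> bound = preparedWidgetOp.bind(someId);
    Widget w = bound.sync();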

@ -15,35 +15,36 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;
import java.util.Optional;

public final class BoundOptionalOperation<E>
    extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractOptionalOperation<E, ?> delegate;

  public BoundOptionalOperation(
      BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public Optional<E> transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

@ -15,43 +15,43 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;
import java.util.List;
import java.util.stream.Stream;
import net.helenus.core.cache.Facet;

public final class BoundStreamOperation<E>
    extends AbstractStreamOperation<E, BoundStreamOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractStreamOperation<E, ?> delegate;

  public BoundStreamOperation(
      BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public List<Facet> bindFacetValues() {
    return delegate.bindFacetValues();
  }

  @Override
  public Stream<E> transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

@ -20,7 +20,6 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
@ -29,53 +28,56 @@ import net.helenus.support.HelenusMappingException;

public final class CountOperation extends AbstractFilterOperation<Long, CountOperation> {

  private HelenusEntity entity;

  public CountOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);
    this.entity = entity;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    if (filters != null && !filters.isEmpty()) {
      filters.forEach(f -> addPropertyNode(f.getNode()));
    }

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    Select select = QueryBuilder.select().countAll().from(entity.getName().toCql());

    if (filters != null && !filters.isEmpty()) {
      Where where = select.where();
      for (Filter<?> filter : filters) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
      }
    }
    return select;
  }

  @Override
  public Long transform(ResultSet resultSet) {
    return resultSet.one().getLong(0);
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException(
          "you can count columns only in single entity "
              + entity.getMappingInterface()
              + " or "
              + p.getEntity().getMappingInterface());
    }
  }
}
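A hedged sketch of how the count operation is typically driven from the session DSL; session, widget, someId, and Query.eq are assumed names, and the CQL shown is illustrative rather than captured driver output:

    // buildStatement() above emits roughly: SELECT count(*) FROM widget WHERE id = ?;
    Long rows = session.count()
        .where(widget::id, Query.eq(someId))   // filters accumulate before buildStatement()
        .sync();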

@ -15,15 +15,13 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.UnitOfWork;
@ -34,126 +32,128 @@ import net.helenus.support.HelenusMappingException;

public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {

  private HelenusEntity entity;
  private boolean ifExists = false;

  private int[] ttl;
  private long[] timestamp;

  public DeleteOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public DeleteOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);
    this.entity = entity;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    if (filters != null && !filters.isEmpty()) {
      filters.forEach(f -> addPropertyNode(f.getNode()));
    }

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    if (filters != null && !filters.isEmpty()) {

      Delete delete = QueryBuilder.delete().from(entity.getName().toCql());

      if (this.ifExists) {
        delete.ifExists();
      }

      Where where = delete.where();

      for (Filter<?> filter : filters) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
      }

      if (ifFilters != null && !ifFilters.isEmpty()) {
        for (Filter<?> filter : ifFilters) {
          delete.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
        }
      }

      if (this.ttl != null) {
        delete.using(QueryBuilder.ttl(this.ttl[0]));
      }
      if (this.timestamp != null) {
        delete.using(QueryBuilder.timestamp(this.timestamp[0]));
      }

      return delete;

    } else {
      return QueryBuilder.truncate(entity.getName().toCql());
    }
  }

  @Override
  public ResultSet transform(ResultSet resultSet) {
    return resultSet;
  }

  public DeleteOperation ifExists() {
    this.ifExists = true;
    return this;
  }

  public DeleteOperation usingTtl(int ttl) {
    this.ttl = new int[1];
    this.ttl[0] = ttl;
    return this;
  }

  public DeleteOperation usingTimestamp(long timestamp) {
    this.timestamp = new long[1];
    this.timestamp[0] = timestamp;
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException(
          "you can delete rows only in single entity "
              + entity.getMappingInterface()
              + " or "
              + p.getEntity().getMappingInterface());
    }
  }

  public List<Facet> bindFacetValues() {
    return bindFacetValues(getFacets());
  }

  @Override
  public ResultSet sync() throws TimeoutException {
    ResultSet result = super.sync();
    if (entity.isCacheable()) {
      sessionOps.cacheEvict(bindFacetValues());
    }
    return result;
  }

  @Override
  public ResultSet sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      return sync();
    }
    ResultSet result = super.sync(uow);
    uow.cacheEvict(bindFacetValues());
    return result;
  }

  @Override
  public List<Facet> getFacets() {
    return entity.getFacets();
  }
}
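One behavior of buildStatement() above that is easy to miss: with no WHERE filters the operation falls through to a table truncation. A hedged sketch, where session, widget, someId, and Query.eq are assumed names from the surrounding DSL:

    // With a filter this builds "DELETE FROM widget WHERE id = ?";
    // with no where(...) clause at all it builds "TRUNCATE widget" instead.
    ResultSet rs = session.delete()
        .where(widget::id, Query.eq(someId))
        .sync();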

@ -15,20 +15,19 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
@ -42,233 +41,282 @@ import net.helenus.support.HelenusMappingException;

public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {

  private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
      new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
  private final T pojo;
  private final Class<?> resultType;
  private HelenusEntity entity;
  private boolean ifNotExists;

  private int[] ttl;
  private long[] timestamp;

  public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.resultType = ResultSet.class;
  }

  public InsertOperation(
      AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.resultType = resultType;
  }

  public InsertOperation(
      AbstractSessionOperations sessionOperations,
      HelenusEntity entity,
      T pojo,
      Set<String> mutations,
      boolean ifNotExists) {
    super(sessionOperations);

    this.entity = entity;
    this.pojo = pojo;
    this.ifNotExists = ifNotExists;
    this.resultType = entity.getMappingInterface();

    Collection<HelenusProperty> properties = entity.getOrderedProperties();
    Set<String> keys = (mutations == null) ? null : mutations;

    for (HelenusProperty prop : properties) {
      boolean addProp = false;

      switch (prop.getColumnType()) {
        case PARTITION_KEY:
        case CLUSTERING_COLUMN:
          addProp = true;
          break;
        default:
          addProp = (keys == null || keys.contains(prop.getPropertyName()));
      }

      if (addProp) {
        Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
        value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);

        if (value != null) {
          HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
          values.add(Fun.Tuple2.of(node, value));
        }
      }
    }
  }

  public InsertOperation<T> ifNotExists() {
    this.ifNotExists = true;
    return this;
  }

  public InsertOperation<T> ifNotExists(boolean enable) {
    this.ifNotExists = enable;
    return this;
  }

  public <V> InsertOperation<T> value(Getter<V> getter, V val) {
    Objects.requireNonNull(getter, "getter is empty");

    if (val != null) {
      HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
      Object value = sessionOps.getValuePreparer().prepareColumnValue(val, node.getProperty());

      if (value != null) {
        values.add(Fun.Tuple2.of(node, value));
      }
    }

    return this;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    values.forEach(t -> addPropertyNode(t._1));

    if (values.isEmpty()) return null;

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    Insert insert = QueryBuilder.insertInto(entity.getName().toCql());

    if (ifNotExists) {
      insert.ifNotExists();
    }

    values.forEach(
        t -> {
          insert.value(t._1.getColumnName(), t._2);
        });

    if (this.ttl != null) {
      insert.using(QueryBuilder.ttl(this.ttl[0]));
    }
    if (this.timestamp != null) {
      insert.using(QueryBuilder.timestamp(this.timestamp[0]));
    }

    return insert;
  }

  @Override
  public T transform(ResultSet resultSet) {
    if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
      throw new HelenusException("Statement was not applied due to consistency constraints");
    }

    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
      if (values.size() > 0) {
        boolean immutable = iface.isAssignableFrom(Drafted.class);
        Collection<HelenusProperty> properties = entity.getOrderedProperties();
        Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());

        // First, add all the inserted values into our new map.
        values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));

        // Then, fill in all the rest of the properties.
        for (HelenusProperty prop : properties) {
          String key = prop.getPropertyName();
          if (backingMap.containsKey(key)) {
            // Some values may need to be converted (e.g. from String to Enum). This is done
            // within the BeanColumnValueProvider below.
            Optional<Function<Object, Object>> converter =
                prop.getReadConverter(sessionOps.getSessionRepository());
            if (converter.isPresent()) {
              backingMap.put(key, converter.get().apply(backingMap.get(key)));
            }
          } else {
            // If we started this operation with an instance of this type, use values from
            // that.
            if (pojo != null) {
              backingMap.put(
                  key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
            } else {
              // Otherwise we'll use default values for the property type if available.
              Class<?> propType = prop.getJavaType();
              if (propType.isPrimitive()) {
                DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
                if (type == null) {
                  throw new HelenusException("unknown primitive type " + propType);
                }
                backingMap.put(key, type.getDefaultValue());
              }
            }
          }
        }

        // Lastly, create a new proxy object for the entity and return the new instance.
        return (T) Helenus.map(iface, backingMap);
      }
      // Oddly, this insert didn't change anything so simply return the pojo.
      return (T) pojo;
    }
    return (T) resultSet;
  }

  public InsertOperation<T> usingTtl(int ttl) {
    this.ttl = new int[1];
    this.ttl[0] = ttl;
    return this;
  }

  public InsertOperation<T> usingTimestamp(long timestamp) {
    this.timestamp = new long[1];
    this.timestamp[0] = timestamp;
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException(
          "you can insert only single entity "
              + entity.getMappingInterface()
              + " or "
              + p.getEntity().getMappingInterface());
    }
  }

  @Override
  public T sync() throws TimeoutException {
    T result = super.sync();
    if (entity.isCacheable() && result != null) {
      sessionOps.updateCache(result, entity.getFacets());
    }
    return result;
  }

  @Override
  public T sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      return sync();
    }
    T result = super.sync(uow);
    if (result != null && pojo != null && !(pojo == result) && pojo.equals(result)) {
      // To preserve object identity we need to find this object in cache
      // because it was unchanged by the INSERT but pojo in this case was
      // the result of a draft.build().
      T cachedValue = (T) uow.cacheLookup(bindFacetValues());
      if (cachedValue != null) {
        result = cachedValue;
      }
    }
    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
      cacheUpdate(uow, result, entity.getFacets());
    } else {
      if (entity.isCacheable()) {
        sessionOps.cacheEvict(bindFacetValues());
      }
    }
    return result;
  }

  @Override
  public List<Facet> bindFacetValues() {
    List<Facet> facets = getFacets();
    if (facets == null || facets.size() == 0) {
      return new ArrayList<Facet>();
    }
    List<Facet> boundFacets = new ArrayList<>();
    Map<HelenusProperty, Object> valuesMap = new HashMap<>(values.size());
    values.forEach(t -> valuesMap.put(t._1.getProperty(), t._2));

    for (Facet facet : facets) {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        for (HelenusProperty prop : unboundFacet.getProperties()) {
          Object value = valuesMap.get(prop);
          if (value != null) {
            binder.setValueForProperty(prop, value.toString());
          }
        }
        if (binder.isBound()) {
          boundFacets.add(binder.bind());
        }
      } else {
        boundFacets.add(facet);
      }
    }
    return boundFacets;
  }

  @Override
  public List<Facet> getFacets() {
    if (entity != null) {
      return entity.getFacets();
    } else {
      return new ArrayList<Facet>();
    }
  }
}
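A sketch of the lightweight-transaction behavior added to transform() above: when ifNotExists() is in effect and Cassandra reports the write was not applied, the operation now surfaces a HelenusException instead of quietly returning a half-populated object. The names session, Widget, and draft below are illustrative:

    try {
      Widget w = session.<Widget>insert(draft)   // assumed insert(pojo) entry point on the session
          .ifNotExists()
          .usingTtl(86_400)                      // one day, in seconds
          .sync();
    } catch (HelenusException e) {
      // [applied] was false: a row with the same primary key already existed
    }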

@ -15,153 +15,190 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;
import java.net.InetAddress;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class Operation<E> { public abstract class Operation<E> {
private static final Logger LOG = LoggerFactory.getLogger(Operation.class); private static final Logger LOG = LoggerFactory.getLogger(Operation.class);
protected final AbstractSessionOperations sessionOps; protected final AbstractSessionOperations sessionOps;
protected final Meter uowCacheHits; protected final Meter uowCacheHits;
protected final Meter uowCacheMiss; protected final Meter uowCacheMiss;
protected final Meter sessionCacheHits; protected final Meter sessionCacheHits;
protected final Meter sessionCacheMiss; protected final Meter sessionCacheMiss;
protected final Meter cacheHits; protected final Meter cacheHits;
protected final Meter cacheMiss; protected final Meter cacheMiss;
protected final Timer requestLatency; protected final Timer requestLatency;
Operation(AbstractSessionOperations sessionOperations) { Operation(AbstractSessionOperations sessionOperations) {
this.sessionOps = sessionOperations; this.sessionOps = sessionOperations;
MetricRegistry metrics = sessionOperations.getMetricRegistry(); MetricRegistry metrics = sessionOperations.getMetricRegistry();
if (metrics == null) { if (metrics == null) {
metrics = new MetricRegistry(); metrics = new MetricRegistry();
} }
this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits"); this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss"); this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
this.sessionCacheHits = metrics.meter("net.helenus.session-cache-hits"); this.sessionCacheHits = metrics.meter("net.helenus.session-cache-hits");
this.sessionCacheMiss = metrics.meter("net.helenus.session-cache-miss"); this.sessionCacheMiss = metrics.meter("net.helenus.session-cache-miss");
this.cacheHits = metrics.meter("net.helenus.cache-hits"); this.cacheHits = metrics.meter("net.helenus.cache-hits");
this.cacheMiss = metrics.meter("net.helenus.cache-miss"); this.cacheMiss = metrics.meter("net.helenus.cache-miss");
this.requestLatency = metrics.timer("net.helenus.request-latency"); this.requestLatency = metrics.timer("net.helenus.request-latency");
} }
  public static String queryString(Statement statement, boolean includeValues) {
    String query = null;
    if (statement instanceof BuiltStatement) {
      BuiltStatement builtStatement = (BuiltStatement) statement;
      if (includeValues) {
        RegularStatement regularStatement = builtStatement.setForceNoValues(true);
        query = regularStatement.getQueryString();
      } else {
        query = builtStatement.getQueryString();
      }
    } else if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      query = regularStatement.getQueryString();
    } else {
      query = statement.toString();
    }
    return query;
  }

  public ResultSet execute(
      AbstractSessionOperations session,
      UnitOfWork uow,
      TraceContext traceContext,
      long timeout,
      TimeUnit units,
      boolean showValues,
      boolean cached)
      throws TimeoutException {

    // Start recording in a Zipkin sub-span our execution time to perform this operation.
    Tracer tracer = session.getZipkinTracer();
    Span span = null;
    if (tracer != null && traceContext != null) {
      span = tracer.newChild(traceContext);
    }

    try {
      if (span != null) {
        span.name("cassandra");
        span.start();
      }

      Statement statement = options(buildStatement(cached));
      Stopwatch timer = Stopwatch.createStarted();
      try {
        ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues);
        if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1);
        ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
        ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();

        if (LOG.isDebugEnabled()) {
          ExecutionInfo ei = resultSet.getExecutionInfo();
          Host qh = ei.getQueriedHost();
          String oh =
              ei.getTriedHosts()
                  .stream()
                  .map(Host::getAddress)
                  .map(InetAddress::toString)
                  .collect(Collectors.joining(", "));
          ConsistencyLevel cl = ei.getAchievedConsistencyLevel();
          int se = ei.getSpeculativeExecutions();
          String warn = ei.getWarnings().stream().collect(Collectors.joining(", "));
          String ri =
              String.format(
                  "%s %s %s %s %s %s%sspec-retries: %d",
                  "server v" + qh.getCassandraVersion(),
                  qh.getAddress().toString(),
                  (oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "",
                  qh.getDatacenter(),
                  qh.getRack(),
                  (cl != null)
                      ? (" consistency: "
                          + cl.name()
                          + (cl.isDCLocal() ? " DC " : "")
                          + (cl.isSerial() ? " SC " : ""))
                      : "",
                  (warn != null && !warn.equals("")) ? ": " + warn : "",
                  se);
          if (uow != null) uow.setInfo(ri);
          else LOG.debug(ri);
        }

        if (!resultSet.wasApplied()
            && !(columnDefinitions.size() > 1 || !columnDefinitions.contains("[applied]"))) {
          throw new HelenusException("Operation Failed");
        }
        return resultSet;

      } finally {
        timer.stop();
        if (uow != null) uow.addDatabaseTime("Cassandra", timer);
        log(statement, uow, timer, showValues);
      }

    } finally {
      if (span != null) {
        span.finish();
      }
    }
  }

  void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
    if (LOG.isInfoEnabled()) {
      String uowString = "";
      if (uow != null) {
        uowString = "UOW(" + uow.hashCode() + ")";
      }
      String timerString = "";
      if (timer != null) {
        timerString = String.format(" %s ", timer.toString());
      }
      LOG.info(
          String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false)));
    }
  }

  public Statement options(Statement statement) {
    return statement;
  }

  public Statement buildStatement(boolean cached) {
    return null;
  }

  public List<Facet> getFacets() {
    return new ArrayList<Facet>();
  }

  public List<Facet> bindFacetValues() {
    return null;
  }

  public boolean isSessionCacheable() {
    return false;
  }
}
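For reference, the log(...) helper above emits one line per statement shaped roughly as follows; the values are illustrative only (a UnitOfWork hash code, the Stopwatch text, then the unbound query string):

  UOW(1387380406) 1.342 ms SELECT id, name FROM person LIMIT 10;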

@@ -20,28 +20,27 @@ import com.datastax.driver.core.PreparedStatement;

public final class PreparedOperation<E> {

  private final PreparedStatement preparedStatement;
  private final AbstractOperation<E, ?> operation;

  public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) {
    this.preparedStatement = statement;
    this.operation = operation;
  }

  public PreparedStatement getPreparedStatement() {
    return preparedStatement;
  }

  public BoundOperation<E> bind(Object... params) {
    BoundStatement boundStatement = preparedStatement.bind(params);
    return new BoundOperation<E>(boundStatement, operation);
  }

  @Override
  public String toString() {
    return preparedStatement.getQueryString();
  }
}
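A minimal usage sketch of the prepare/bind pair above; how the PreparedOperation is first obtained, and the "Person" entity, are hypothetical stand-ins rather than part of this change:

  // Sketch only: "Person" and the origin of "prepared" are assumptions.
  void bindAndInspect(PreparedOperation<Person> prepared) {
    BoundOperation<Person> bound = prepared.bind("alice"); // wraps preparedStatement.bind(params)
    String cql = prepared.toString();                      // the CQL of the underlying PreparedStatement
  }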

@@ -20,27 +20,28 @@ import com.datastax.driver.core.PreparedStatement;

public final class PreparedOptionalOperation<E> {

  private final PreparedStatement preparedStatement;
  private final AbstractOptionalOperation<E, ?> operation;

  public PreparedOptionalOperation(
      PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
    this.preparedStatement = statement;
    this.operation = operation;
  }

  public PreparedStatement getPreparedStatement() {
    return preparedStatement;
  }

  public BoundOptionalOperation<E> bind(Object... params) {
    BoundStatement boundStatement = preparedStatement.bind(params);
    return new BoundOptionalOperation<E>(boundStatement, operation);
  }

  @Override
  public String toString() {
    return preparedStatement.getQueryString();
  }
}

@@ -20,25 +20,26 @@ import com.datastax.driver.core.PreparedStatement;

public final class PreparedStreamOperation<E> {

  private final PreparedStatement preparedStatement;
  private final AbstractStreamOperation<E, ?> operation;

  public PreparedStreamOperation(
      PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
    this.preparedStatement = statement;
    this.operation = operation;
  }

  public PreparedStatement getPreparedStatement() {
    return preparedStatement;
  }

  public BoundStreamOperation<E> bind(Object... params) {
    BoundStatement boundStatement = preparedStatement.bind(params);
    return new BoundStreamOperation<E>(boundStatement, operation);
  }

  @Override
  public String toString() {
    return preparedStatement.getQueryString();
  }
}

@@ -15,53 +15,52 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import net.helenus.core.cache.Facet;

public final class SelectFirstOperation<E>
    extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {

  private final SelectOperation<E> delegate;

  public SelectFirstOperation(SelectOperation<E> delegate) {
    super(delegate.sessionOps);

    this.delegate = delegate;
    this.filters = delegate.filters;
    this.ifFilters = delegate.ifFilters;
  }

  public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) {
    return new SelectFirstTransformingOperation<R, E>(delegate, fn);
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    return delegate.buildStatement(cached);
  }

  @Override
  public List<Facet> getFacets() {
    return delegate.getFacets();
  }

  @Override
  public List<Facet> bindFacetValues() {
    return delegate.bindFacetValues();
  }

  @Override
  public Optional<E> transform(ResultSet resultSet) {
    return delegate.transform(resultSet).findFirst();
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

@@ -15,48 +15,45 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import net.helenus.core.cache.Facet;

public final class SelectFirstTransformingOperation<R, E>
    extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {

  private final SelectOperation<E> delegate;
  private final Function<E, R> fn;

  public SelectFirstTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
    super(delegate.sessionOps);

    this.delegate = delegate;
    this.fn = fn;
    this.filters = delegate.filters;
    this.ifFilters = delegate.ifFilters;
  }

  @Override
  public List<Facet> bindFacetValues() {
    return delegate.bindFacetValues();
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    return delegate.buildStatement(cached);
  }

  @Override
  public Optional<R> transform(ResultSet resultSet) {
    return delegate.transform(resultSet).findFirst().map(fn);
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

@@ -15,14 +15,6 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement;
@@ -32,7 +24,10 @@ import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.collect.Iterables;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.helenus.core.*;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
@@ -45,285 +40,326 @@ import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.Fun;
import net.helenus.support.HelenusMappingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {

  private static final Logger LOG = LoggerFactory.getLogger(SelectOperation.class);

  protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
  protected Function<Row, E> rowMapper = null;
  protected List<Ordering> ordering = null;
  protected Integer limit = null;
  protected boolean allowFiltering = false;
  protected String alternateTableName = null;
  protected boolean isCacheable = false;

  @SuppressWarnings("unchecked")
  public SelectOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);

    this.rowMapper =
        new Function<Row, E>() {

          @Override
          public E apply(Row source) {

            ColumnValueProvider valueProvider = sessionOps.getValueProvider();
            Object[] arr = new Object[props.size()];

            int i = 0;
            for (HelenusPropertyNode p : props) {
              Object value = valueProvider.getColumnValue(source, -1, p.getProperty());
              arr[i++] = value;
            }

            return (E) Fun.ArrayTuple.of(arr);
          }
        };
  }

  public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);

    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
  }

  public SelectOperation(
      AbstractSessionOperations sessionOperations,
      HelenusEntity entity,
      Function<Row, E> rowMapper) {
    super(sessionOperations);
    this.rowMapper = rowMapper;

    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
  }

  public SelectOperation(
      AbstractSessionOperations sessionOperations,
      Function<Row, E> rowMapper,
      HelenusPropertyNode... props) {
    super(sessionOperations);

    this.rowMapper = rowMapper;
    Collections.addAll(this.props, props);
  }

  public CountOperation count() {

    HelenusEntity entity = null;
    for (HelenusPropertyNode prop : props) {

      if (entity == null) {
        entity = prop.getEntity();
      } else if (entity != prop.getEntity()) {
        throw new HelenusMappingException(
            "you can count records only from a single entity "
                + entity.getMappingInterface()
                + " or "
                + prop.getEntity().getMappingInterface());
      }
    }

    return new CountOperation(sessionOps, entity);
  }

  public <V extends E> SelectOperation<E> from(Class<V> materializedViewClass) {
    Objects.requireNonNull(materializedViewClass);
    HelenusEntity entity = Helenus.entity(materializedViewClass);
    this.alternateTableName = entity.getName().toCql();
    this.props.clear();
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));
    return this;
  }

  public SelectFirstOperation<E> single() {
    limit(1);
    return new SelectFirstOperation<E>(this);
  }

  public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) {

    Objects.requireNonNull(entityClass, "entityClass is null");

    HelenusEntity entity = Helenus.entity(entityClass);

    this.rowMapper = null;

    return new SelectTransformingOperation<R, E>(
        this,
        (r) -> {
          Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
          return (R) Helenus.map(entityClass, map);
        });
  }

  public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) {
    return new SelectTransformingOperation<R, E>(this, fn);
  }

  public SelectOperation<E> column(Getter<?> getter) {
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
    this.props.add(p);
    return this;
  }

  public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) {
    getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering());
    return this;
  }

  public SelectOperation<E> orderBy(Ordered ordered) {
    getOrCreateOrdering().add(ordered.getOrdering());
    return this;
  }

  public SelectOperation<E> limit(Integer limit) {
    this.limit = limit;
    return this;
  }

  public SelectOperation<E> allowFiltering() {
    this.allowFiltering = true;
    return this;
  }

  @Override
  public boolean isSessionCacheable() {
    return isCacheable;
  }

  @Override
  public List<Facet> getFacets() {
    HelenusEntity entity = props.get(0).getEntity();
    return entity.getFacets();
  }

  @Override
  public List<Facet> bindFacetValues() {
    HelenusEntity entity = props.get(0).getEntity();
    List<Facet> boundFacets = new ArrayList<>();

    for (Facet facet : entity.getFacets()) {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        for (HelenusProperty prop : unboundFacet.getProperties()) {
          if (filters != null) {
            Filter filter = filters.get(prop);
            if (filter != null) {
              Object[] postulates = filter.postulateValues();
              for (Object p : postulates) {
                binder.setValueForProperty(prop, p.toString());
              }
            }
          }
        }
        if (binder.isBound()) {
          boundFacets.add(binder.bind());
        }
      } else {
        boundFacets.add(facet);
      }
    }
    return boundFacets;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {

    HelenusEntity entity = null;
    Selection selection = QueryBuilder.select();

    for (HelenusPropertyNode prop : props) {
      String columnName = prop.getColumnName();
      selection = selection.column(columnName);

      if (entity == null) {
        entity = prop.getEntity();
      } else if (entity != prop.getEntity()) {
        throw new HelenusMappingException(
            "you can select columns only from a single entity "
                + entity.getMappingInterface()
                + " or "
                + prop.getEntity().getMappingInterface());
      }

      // TODO(gburd): writeTime and ttl will be useful on merge() but cause object
      // identity to fail.
      if (false && cached) {
        switch (prop.getProperty().getColumnType()) {
          case PARTITION_KEY:
          case CLUSTERING_COLUMN:
            break;
          default:
            if (entity.equals(prop.getEntity())) {
              if (prop.getNext().isPresent()) {
                columnName = Iterables.getLast(prop).getColumnName().toCql(true);
              }
              if (!prop.getProperty().getDataType().isCollectionType()) {
                selection.writeTime(columnName).as(columnName + "_writeTime");
                selection.ttl(columnName).as(columnName + "_ttl");
              }
            }
            break;
        }
      }
    }

    if (entity == null) {
      throw new HelenusMappingException("no entity or table to select data");
    }

    String tableName = alternateTableName == null ? entity.getName().toCql() : alternateTableName;
    Select select = selection.from(tableName);

    if (ordering != null && !ordering.isEmpty()) {
      select.orderBy(ordering.toArray(new Ordering[ordering.size()]));
    }

    if (limit != null) {
      select.limit(limit);
    }

    if (filters != null && !filters.isEmpty()) {

      Where where = select.where();

      boolean isFirstIndex = true;
      for (Filter<?> filter : filters.values()) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
        HelenusProperty prop = filter.getNode().getProperty();
        if (allowFiltering == false) {
          switch (prop.getColumnType()) {
            case PARTITION_KEY:
            case CLUSTERING_COLUMN:
              break;
            default:
              // When using non-Cassandra-standard 2i types or when using more than one
              // indexed column or non-indexed columns the query must include ALLOW FILTERING.
              if (prop.caseSensitiveIndex()) {
                allowFiltering = true;
              } else if (prop.getIndexName() != null) {
                allowFiltering |= !isFirstIndex;
                isFirstIndex = false;
              } else {
                allowFiltering = true;
              }
          }
        }
      }
    }

    if (ifFilters != null && !ifFilters.isEmpty()) {
      LOG.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
    }

    if (allowFiltering) {
      select.allowFiltering();
    }

    return select;
  }

  @SuppressWarnings("unchecked")
  @Override
  public Stream<E> transform(ResultSet resultSet) {
    if (rowMapper != null) {
      return StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
          .map(rowMapper);
    } else {
      return (Stream<E>)
          StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
              false);
    }
  }

  private List<Ordering> getOrCreateOrdering() {
    if (ordering == null) {
      ordering = new ArrayList<Ordering>();
    }
    return ordering;
  }
}
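A short sketch of how the fluent methods above compose; the Getter argument and the calling context are assumptions, only the chained calls come from SelectOperation itself:

  // Sketch only: composes column(), orderBy(), limit() and allowFiltering() on an existing operation.
  static SelectOperation<Fun.ArrayTuple> tenByName(
      SelectOperation<Fun.ArrayTuple> op, Getter<String> name) {
    return op.column(name)                          // project the column
        .orderBy(name, OrderingDirection.ASC)       // ORDER BY ... ASC
        .limit(10)                                  // LIMIT 10
        .allowFiltering();                          // emits ALLOW FILTERING in buildStatement()
  }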

@@ -15,48 +15,45 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
import net.helenus.core.cache.Facet;

public final class SelectTransformingOperation<R, E>
    extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {

  private final SelectOperation<E> delegate;
  private final Function<E, R> fn;

  public SelectTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
    super(delegate.sessionOps);

    this.delegate = delegate;
    this.fn = fn;
    this.filters = delegate.filters;
    this.ifFilters = delegate.ifFilters;
  }

  @Override
  public List<Facet> bindFacetValues() {
    return delegate.bindFacetValues();
  }

  @Override
  public List<Facet> getFacets() {
    return delegate.getFacets();
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    return delegate.buildStatement(cached);
  }

  @Override
  public Stream<R> transform(ResultSet resultSet) {
    return delegate.transform(resultSet).map(fn);
  }
}

File diff suppressed because it is too large
@@ -19,34 +19,41 @@ import java.util.HashMap;
import java.util.Map;

public enum DefaultPrimitiveTypes {
  BOOLEAN(boolean.class, false),
  BYTE(byte.class, (byte) 0x0),
  CHAR(char.class, (char) 0x0),
  SHORT(short.class, (short) 0),
  INT(int.class, 0),
  LONG(long.class, 0L),
  FLOAT(float.class, 0.0f),
  DOUBLE(double.class, 0.0);

  private static final Map<Class<?>, DefaultPrimitiveTypes> map =
      new HashMap<Class<?>, DefaultPrimitiveTypes>();

  static {
    for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
      map.put(type.getPrimitiveClass(), type);
    }
  }

  private final Class<?> primitiveClass;
  private final Object defaultValue;

  private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
    this.primitiveClass = primitiveClass;
    this.defaultValue = defaultValue;
  }

  public static DefaultPrimitiveTypes lookup(Class<?> primitiveClass) {
    return map.get(primitiveClass);
  }

  public Class<?> getPrimitiveClass() {
    return primitiveClass;
  }

  public Object getDefaultValue() {
    return defaultValue;
  }
}
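Usage follows directly from the enum above, for example when a default value is needed for an unset primitive property:

  Object intDefault = DefaultPrimitiveTypes.lookup(int.class).getDefaultValue();      // 0
  Object boolDefault = DefaultPrimitiveTypes.lookup(boolean.class).getDefaultValue(); // false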

@@ -19,7 +19,7 @@ import java.util.Set;

public interface Drafted<T> extends MapExportable {

  Set<String> mutated();

  T build();
}

@@ -16,18 +16,17 @@
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;
import net.helenus.mapping.HelenusEntity;

public interface DslExportable {

  String GET_ENTITY_METHOD = "getHelenusMappingEntity";
  String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
  String SET_METADATA_METHOD = "setCassandraMetadataForHelenusSession";

  HelenusEntity getHelenusMappingEntity();

  HelenusPropertyNode getParentDslHelenusPropertyNode();

  void setCassandraMetadataForHelenusSession(Metadata metadata);
}

@@ -15,6 +15,7 @@
 */
package net.helenus.core.reflect;

import com.datastax.driver.core.*;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
@@ -22,9 +23,6 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;
@@ -37,164 +35,178 @@ import net.helenus.support.HelenusException;

public class DslInvocationHandler<E> implements InvocationHandler {

  private final Class<E> iface;
  private final ClassLoader classLoader;
  private final Optional<HelenusPropertyNode> parent;
  private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
  private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
  private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
  private HelenusEntity entity = null;
  private Metadata metadata = null;

  public DslInvocationHandler(
      Class<E> iface,
      ClassLoader classLoader,
      Optional<HelenusPropertyNode> parent,
      Metadata metadata) {

    this.metadata = metadata;
    this.parent = parent;
    this.iface = iface;
    this.classLoader = classLoader;
  }

  public void setCassandraMetadataForHelenusSession(Metadata metadata) {
    if (metadata != null) {
      this.metadata = metadata;
      entity = init(metadata);
    }
  }

  private HelenusEntity init(Metadata metadata) {
    HelenusEntity entity = new HelenusMappingEntity(iface, metadata);
    Collection<HelenusProperty> properties = entity.getOrderedProperties();
    if (properties != null) {
      for (HelenusProperty prop : properties) {

        map.put(prop.getGetterMethod(), prop);

        AbstractDataType type = prop.getDataType();
        Class<?> javaType = prop.getJavaType();

        if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {

          Object childDsl =
              Helenus.dsl(
                  javaType,
                  classLoader,
                  Optional.of(new HelenusPropertyNode(prop, parent)),
                  metadata);

          udtMap.put(prop.getGetterMethod(), childDsl);
        }

        if (type instanceof DTDataType) {
          DTDataType dataType = (DTDataType) type;

          if (dataType.getDataType() instanceof TupleType
              && !TupleValue.class.isAssignableFrom(javaType)) {

            Object childDsl =
                Helenus.dsl(
                    javaType,
                    classLoader,
                    Optional.of(new HelenusPropertyNode(prop, parent)),
                    metadata);

            tupleMap.put(prop.getGetterMethod(), childDsl);
          }
        }
      }
    }

    return entity;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

    HelenusEntity entity = this.entity;
    String methodName = method.getName();

    if ("equals".equals(methodName) && method.getParameterCount() == 1) {
      Object otherObj = args[0];
      if (otherObj == null) {
        return false;
      }
      if (Proxy.isProxyClass(otherObj.getClass())) {
        return this == Proxy.getInvocationHandler(otherObj);
      }
      return false;
    }

    if (DslExportable.SET_METADATA_METHOD.equals(methodName)
        && args.length == 1
        && args[0] instanceof Metadata) {
      if (metadata == null) {
        this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
      }
      return null;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

    if ("hashCode".equals(methodName)) {
      return hashCode();
    }

    if (DslExportable.GET_PARENT_METHOD.equals(methodName)) {
      return parent.get();
    }

    if (entity == null) {
      entity = init(metadata);
    }

    if ("toString".equals(methodName)) {
      return entity.toString();
    }

    if (DslExportable.GET_ENTITY_METHOD.equals(methodName)) {
      return entity;
    }

    HelenusProperty prop = map.get(method);
    if (prop == null) {
      prop = entity.getProperty(methodName);
    }

    if (prop != null) {

      AbstractDataType type = prop.getDataType();

      if (type instanceof UDTDataType) {

        Object childDsl = udtMap.get(method);

        if (childDsl != null) {
          return childDsl;
        }
      }

      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;
        DataType dt = dataType.getDataType();

        switch (dt.getName()) {
          case TUPLE:
            Object childDsl = tupleMap.get(method);

            if (childDsl != null) {
              return childDsl;
            }

            break;

          case SET:
            return new SetDsl(new HelenusPropertyNode(prop, parent));

          case LIST:
            return new ListDsl(new HelenusPropertyNode(prop, parent));

          case MAP:
            return new MapDsl(new HelenusPropertyNode(prop, parent));

          default:
            break;
        }
      }

      throw new DslPropertyException(new HelenusPropertyNode(prop, parent));
    }

    throw new HelenusException("invalid method call " + method);
  }
}
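For orientation, a handler like this is attached to a JDK dynamic proxy; the real wiring lives elsewhere in Helenus (ReflectionDslInstantiator), so the interface list below is an assumption rather than the project's exact code:

  // Sketch only: standard java.lang.reflect.Proxy wiring for the handler above.
  @SuppressWarnings("unchecked")
  static <E> E newDslProxy(Class<E> iface, ClassLoader cl, Metadata metadata) {
    DslInvocationHandler<E> handler =
        new DslInvocationHandler<E>(iface, cl, Optional.empty(), metadata);
    return (E) Proxy.newProxyInstance(cl, new Class<?>[] {iface, DslExportable.class}, handler);
  }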

@@ -19,9 +19,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;
import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;
@@ -29,79 +27,79 @@ import net.helenus.support.HelenusMappingException;

public final class HelenusNamedProperty implements HelenusProperty {

  private final String name;

  public HelenusNamedProperty(String name) {
    this.name = name;
  }

  @Override
  public HelenusEntity getEntity() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public String getPropertyName() {
    return name;
  }

  @Override
  public Method getGetterMethod() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public IdentityName getColumnName() {
    return IdentityName.of(name, false);
  }

  @Override
  public Optional<IdentityName> getIndexName() {
    return Optional.empty();
  }

  @Override
  public boolean caseSensitiveIndex() {
    return false;
  }

  @Override
  public Class<?> getJavaType() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public AbstractDataType getDataType() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public ColumnType getColumnType() {
    return ColumnType.COLUMN;
  }

  @Override
  public int getOrdinal() {
    return 0;
  }

  @Override
  public OrderingDirection getOrdering() {
    return OrderingDirection.ASC;
  }

  @Override
  public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
    return Optional.empty();
  }

  @Override
  public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
    return Optional.empty();
  }

  @Override
  public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
    return MappingUtil.EMPTY_VALIDATORS;
  }
}

@@ -17,90 +17,89 @@ package net.helenus.core.reflect;
import java.util.*;
import java.util.stream.Collectors;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;

public final class HelenusPropertyNode implements Iterable<HelenusProperty> {

  private final HelenusProperty prop;
  private final Optional<HelenusPropertyNode> next;

  public HelenusPropertyNode(HelenusProperty prop, Optional<HelenusPropertyNode> next) {
    this.prop = prop;
    this.next = next;
  }

  public String getColumnName() {
    if (next.isPresent()) {

      List<String> columnNames = new ArrayList<String>();
      for (HelenusProperty p : this) {
        columnNames.add(p.getColumnName().toCql(true));
      }
      Collections.reverse(columnNames);

      if (prop instanceof HelenusNamedProperty) {
        int size = columnNames.size();
        StringBuilder str = new StringBuilder();
        for (int i = 0; i != size - 1; ++i) {
          if (str.length() != 0) {
            str.append(".");
          }
          str.append(columnNames.get(i));
        }
        str.append("[").append(columnNames.get(size - 1)).append("]");
        return str.toString();
      } else {
        return columnNames.stream().collect(Collectors.joining("."));
      }
    } else {
      return prop.getColumnName().toCql();
    }
  }

  public HelenusEntity getEntity() {
    if (next.isPresent()) {
      HelenusProperty last = prop;
      for (HelenusProperty p : this) {
        last = p;
      }
      return last.getEntity();
    } else {
      return prop.getEntity();
    }
  }

  public HelenusProperty getProperty() {
    return prop;
  }

  public Optional<HelenusPropertyNode> getNext() {
    return next;
  }

  public Iterator<HelenusProperty> iterator() {
    return new PropertyNodeIterator(Optional.of(this));
  }

  private static class PropertyNodeIterator implements Iterator<HelenusProperty> {

    private Optional<HelenusPropertyNode> next;

    public PropertyNodeIterator(Optional<HelenusPropertyNode> next) {
      this.next = next;
    }

    @Override
    public boolean hasNext() {
      return next.isPresent();
    }

    @Override
    public HelenusProperty next() {
      HelenusPropertyNode node = next.get();
      next = node.next;
      return node.prop;
    }
  }
}

@@ -16,165 +16,164 @@
package net.helenus.core.reflect;

import java.util.*;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

public final class ListDsl<V> implements List<V> {

  private final HelenusPropertyNode parent;

  public ListDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override
  public V get(int index) {
    HelenusProperty prop = new HelenusNamedProperty(Integer.toString(index));
    throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
  }

  @Override
  public int size() {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public boolean isEmpty() {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean contains(Object o) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public Iterator<V> iterator() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public Object[] toArray() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public <T> T[] toArray(T[] a) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public boolean add(V e) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean remove(Object o) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean containsAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean addAll(Collection<? extends V> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean addAll(int index, Collection<? extends V> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean removeAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean retainAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public void clear() {
    throwShouldNeverCall();
  }

  @Override
  public V set(int index, V element) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public void add(int index, V element) {
    throwShouldNeverCall();
  }

  @Override
  public V remove(int index) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public int indexOf(Object o) {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public int lastIndexOf(Object o) {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public ListIterator<V> listIterator() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public ListIterator<V> listIterator(int index) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public List<V> subList(int fromIndex, int toIndex) {
    throwShouldNeverCall();
    return null;
  }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "ListDsl";
  }
}

@ -19,99 +19,98 @@ import java.util.Collection;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException; import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public final class MapDsl<K, V> implements Map<K, V> { public final class MapDsl<K, V> implements Map<K, V> {
private final HelenusPropertyNode parent; private final HelenusPropertyNode parent;
public MapDsl(HelenusPropertyNode parent) { public MapDsl(HelenusPropertyNode parent) {
this.parent = parent; this.parent = parent;
} }
public HelenusPropertyNode getParent() { public HelenusPropertyNode getParent() {
return parent; return parent;
} }
@Override @Override
public V get(Object key) { public V get(Object key) {
HelenusProperty prop = new HelenusNamedProperty(key.toString()); HelenusProperty prop = new HelenusNamedProperty(key.toString());
throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent))); throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
} }
@Override @Override
public int size() { public int size() {
throwShouldNeverCall(); throwShouldNeverCall();
return 0; return 0;
} }
@Override @Override
public boolean isEmpty() { public boolean isEmpty() {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean containsKey(Object key) { public boolean containsKey(Object key) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean containsValue(Object value) { public boolean containsValue(Object value) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public V put(K key, V value) { public V put(K key, V value) {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public V remove(Object key) { public V remove(Object key) {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public void putAll(Map<? extends K, ? extends V> m) { public void putAll(Map<? extends K, ? extends V> m) {
throwShouldNeverCall(); throwShouldNeverCall();
} }
@Override @Override
public void clear() { public void clear() {
throwShouldNeverCall(); throwShouldNeverCall();
} }
@Override @Override
public Set<K> keySet() { public Set<K> keySet() {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public Collection<V> values() { public Collection<V> values() {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public Set<java.util.Map.Entry<K, V>> entrySet() { public Set<java.util.Map.Entry<K, V>> entrySet() {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
private void throwShouldNeverCall() { private void throwShouldNeverCall() {
throw new HelenusMappingException("should never be called"); throw new HelenusMappingException("should never be called");
} }
@Override @Override
public String toString() { public String toString() {
return "MapDsl"; return "MapDsl";
} }
} }
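The *Dsl collections in this changeset (ListDsl, MapDsl, SetDsl) are placeholders: almost every method throws, except accessors such as MapDsl.get(key), which throw a DslPropertyException carrying a HelenusPropertyNode so the caller can recover which nested property was touched. Below is a minimal, self-contained sketch of that path-capture idea; PathCapturedException, dslMap and capture are illustrative names only, not part of the Helenus API.

import java.util.AbstractMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;

final class PathCapturedException extends RuntimeException {
  final String path;
  PathCapturedException(String path) {
    super(path);
    this.path = path;
  }
}

final class PathCapture {
  // A stand-in "DSL" map: get() throws, carrying the key as the captured path;
  // everything else is never expected to run.
  static <V> Map<String, V> dslMap(String parent) {
    return new AbstractMap<String, V>() {
      @Override
      public V get(Object key) {
        throw new PathCapturedException(parent + "." + key);
      }

      @Override
      public Set<Entry<String, V>> entrySet() {
        throw new UnsupportedOperationException("should never be called");
      }
    };
  }

  // Run a getter expression against the DSL object and recover the path it touched.
  static <T> String capture(T dsl, Function<T, ?> getter) {
    try {
      getter.apply(dsl);
      throw new IllegalStateException("getter did not touch a DSL property");
    } catch (PathCapturedException e) {
      return e.path;
    }
  }
}

Calling capture(PathCapture.<String>dslMap("address"), m -> m.get("city")) would yield "address.city"; Helenus does the equivalent with its generated getter proxies and HelenusPropertyNode chains.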


@ -19,7 +19,7 @@ import java.util.Map;
public interface MapExportable { public interface MapExportable {
public static final String TO_MAP_METHOD = "toMap"; public static final String TO_MAP_METHOD = "toMap";
Map<String, Object> toMap(); Map<String, Object> toMap();
} }


@ -27,155 +27,161 @@ import java.lang.reflect.Proxy;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient; import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap; import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
public class MapperInvocationHandler<E> implements InvocationHandler, Serializable { public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
private static final long serialVersionUID = -7044209982830584984L; private static final long serialVersionUID = -7044209982830584984L;
private final Map<String, Object> src; private final Map<String, Object> src;
private final Class<E> iface; private final Class<E> iface;
public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) { public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
this.src = src; this.src = src;
this.iface = iface; this.iface = iface;
} }
private Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable { private Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable {
// NOTE: This is reflection magic to invoke (non-recursively) a default method // NOTE: This is reflection magic to invoke (non-recursively) a default method
// implemented on an interface // implemented on an interface
// that we've proxied (in ReflectionDslInstantiator). I found the answer in this // that we've proxied (in ReflectionDslInstantiator). I found the answer in this
// article. // article.
// https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/ // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/
// First, we need an instance of a private inner-class found in MethodHandles. // First, we need an instance of a private inner-class found in MethodHandles.
Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, Constructor<MethodHandles.Lookup> constructor =
int.class); MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
constructor.setAccessible(true); constructor.setAccessible(true);
// Now we need to look up and invoke the default method on the interface // Now we need to look up and invoke the default method on the interface
// class, using "invokespecial" semantics. // class, using "invokespecial" semantics.
final Class<?> declaringClass = method.getDeclaringClass(); final Class<?> declaringClass = method.getDeclaringClass();
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE) Object result =
.unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args); constructor
return result; .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
} .unreflectSpecial(method, declaringClass)
.bindTo(proxy)
.invokeWithArguments(args);
return result;
}
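For readers unfamiliar with the trick the NOTE above describes, here is a compact, standalone demo of invoking a default interface method on a java.lang.reflect.Proxy using the same private MethodHandles.Lookup constructor (Java 8 era; the Greeter interface and main method are made up for illustration and are not Helenus code).

import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

interface Greeter {
  default String greet(String name) {
    return "hello, " + name;
  }
}

public class DefaultMethodProxyDemo {
  public static void main(String[] args) {
    InvocationHandler handler =
        (Object proxy, Method method, Object[] a) -> {
          if (method.isDefault()) {
            // Same trick as invokeDefault(): a private Lookup lets us call the
            // interface's own default body instead of re-entering the proxy.
            Constructor<MethodHandles.Lookup> ctor =
                MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
            ctor.setAccessible(true);
            Class<?> declaring = method.getDeclaringClass();
            return ctor
                .newInstance(declaring, MethodHandles.Lookup.PRIVATE)
                .unreflectSpecial(method, declaring)
                .bindTo(proxy)
                .invokeWithArguments(a);
          }
          throw new UnsupportedOperationException(method.getName());
        };

    Greeter g =
        (Greeter)
            Proxy.newProxyInstance(
                Greeter.class.getClassLoader(), new Class<?>[] {Greeter.class}, handler);
    System.out.println(g.greet("world")); // prints "hello, world"
  }
}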
private Object writeReplace() { private Object writeReplace() {
return new SerializationProxy<E>(this); return new SerializationProxy<E>(this);
} }
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("Proxy required.");
}
@Override private void readObject(ObjectInputStream stream) throws InvalidObjectException {
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { throw new InvalidObjectException("Proxy required.");
}
// Transient, default methods should simply be invoked as-is. @Override
if (method.isDefault() && method.getDeclaredAnnotation(Transient.class) != null) { public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
return invokeDefault(proxy, method, args);
}
String methodName = method.getName(); // Transient, default methods should simply be invoked as-is.
if (method.isDefault() && method.getDeclaredAnnotation(Transient.class) != null) {
return invokeDefault(proxy, method, args);
}
if ("equals".equals(methodName) && method.getParameterCount() == 1) { String methodName = method.getName();
Object otherObj = args[0];
if (otherObj == null) {
return false;
}
if (Proxy.isProxyClass(otherObj.getClass())) {
if (this == Proxy.getInvocationHandler(otherObj)) {
return true;
}
}
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
return true;
}
return false;
}
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) { if ("equals".equals(methodName) && method.getParameterCount() == 1) {
throw new HelenusException("invalid getter method " + method); Object otherObj = args[0];
} if (otherObj == null) {
return false;
}
if (Proxy.isProxyClass(otherObj.getClass())) {
if (this == Proxy.getInvocationHandler(otherObj)) {
return true;
}
}
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
return true;
}
if (src instanceof MapExportable && otherObj.equals(((MapExportable) src).toMap())) {
return true;
}
return false;
}
if ("hashCode".equals(methodName)) { if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
return hashCode(); throw new HelenusException("invalid getter method " + method);
} }
if ("toString".equals(methodName)) { if ("hashCode".equals(methodName)) {
return iface.getSimpleName() + ": " + src.toString(); return hashCode();
} }
if ("writeReplace".equals(methodName)) { if ("toString".equals(methodName)) {
return new SerializationProxy(this); return iface.getSimpleName() + ": " + src.toString();
} }
if ("readObject".equals(methodName)) { if ("writeReplace".equals(methodName)) {
throw new InvalidObjectException("Proxy required."); return new SerializationProxy(this);
} }
if ("dsl".equals(methodName)) { if ("readObject".equals(methodName)) {
return Helenus.dsl(iface); throw new InvalidObjectException("Proxy required.");
} }
if (MapExportable.TO_MAP_METHOD.equals(methodName)) { if ("dsl".equals(methodName)) {
return src; // return Collections.unmodifiableMap(src); return Helenus.dsl(iface);
} }
Object value = src.get(methodName); if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
return src; // Collections.unmodifiableMap(src);
}
Class<?> returnType = method.getReturnType(); Object value = src.get(methodName);
if (value == null) { Class<?> returnType = method.getReturnType();
// Default implementations of non-Transient methods in entities are the default if (value == null) {
// value when the
// map contains 'null'.
if (method.isDefault()) {
return invokeDefault(proxy, method, args);
}
// Otherwise, if the return type of the method is a primitive Java type then // Default implementations of non-Transient methods in entities are the default
// we'll return the standard // value when the
// default values to avoid a NPE in user code. // map contains 'null'.
if (returnType.isPrimitive()) { if (method.isDefault()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType); return invokeDefault(proxy, method, args);
if (type == null) { }
throw new HelenusException("unknown primitive type " + returnType);
}
return type.getDefaultValue();
}
}
return value; // Otherwise, if the return type of the method is a primitive Java type then
} // we'll return the standard
// default values to avoid a NPE in user code.
if (returnType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
if (type == null) {
throw new HelenusException("unknown primitive type " + returnType);
}
return type.getDefaultValue();
}
}
static class SerializationProxy<E> implements Serializable { return value;
}
private static final long serialVersionUID = -5617583940055969353L; static class SerializationProxy<E> implements Serializable {
private final Class<E> iface; private static final long serialVersionUID = -5617583940055969353L;
private final Map<String, Object> src;
public SerializationProxy(MapperInvocationHandler mapper) { private final Class<E> iface;
this.iface = mapper.iface; private final Map<String, Object> src;
if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
Set<String> keys = mapper.src.keySet();
for (String key : keys) {
this.src.put(key, mapper.src.get(key));
}
} else {
this.src = mapper.src;
}
}
Object readResolve() throws ObjectStreamException { public SerializationProxy(MapperInvocationHandler mapper) {
return new MapperInvocationHandler(iface, src); this.iface = mapper.iface;
} if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
Set<String> keys = mapper.src.keySet();
for (String key : keys) {
this.src.put(key, mapper.src.get(key));
}
} else {
this.src = mapper.src;
}
}
} Object readResolve() throws ObjectStreamException {
return new MapperInvocationHandler(iface, src);
}
}
} }
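MapperInvocationHandler serializes through its nested SerializationProxy: writeReplace() swaps the proxy in on the way out, readResolve() rebuilds the handler on the way in, and readObject() rejects any stream that tries to bypass the proxy. A generic sketch of that pattern, with a hypothetical Money value class standing in for the handler:

import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;

public final class Money implements Serializable {
  private static final long serialVersionUID = 1L;
  private final String currency;
  private final long cents;

  public Money(String currency, long cents) {
    this.currency = currency;
    this.cents = cents;
  }

  // Serialize the proxy instead of this object.
  private Object writeReplace() {
    return new Proxy(currency, cents);
  }

  // Reject any stream that tries to deserialize Money directly.
  private void readObject(ObjectInputStream in) throws InvalidObjectException {
    throw new InvalidObjectException("Proxy required.");
  }

  private static final class Proxy implements Serializable {
    private static final long serialVersionUID = 2L;
    private final String currency;
    private final long cents;

    Proxy(String currency, long cents) {
      this.currency = currency;
      this.cents = cents;
    }

    // Rebuild the real object through its public constructor on the way in.
    private Object readResolve() throws ObjectStreamException {
      return new Money(currency, cents);
    }
  }
}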


@ -15,22 +15,25 @@
*/ */
package net.helenus.core.reflect; package net.helenus.core.reflect;
import com.datastax.driver.core.Metadata;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.Optional; import java.util.Optional;
import com.datastax.driver.core.Metadata;
import net.helenus.core.DslInstantiator; import net.helenus.core.DslInstantiator;
public enum ReflectionDslInstantiator implements DslInstantiator { public enum ReflectionDslInstantiator implements DslInstantiator {
INSTANCE; INSTANCE;
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, public <E> E instantiate(
Metadata metadata) { Class<E> iface,
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata); ClassLoader classLoader,
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler); Optional<HelenusPropertyNode> parent,
return proxy; Metadata metadata) {
} DslInvocationHandler<E> handler =
new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
E proxy =
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
return proxy;
}
} }


@ -19,15 +19,14 @@ import net.helenus.support.HelenusMappingException;
public final class ReflectionInstantiator { public final class ReflectionInstantiator {
private ReflectionInstantiator() { private ReflectionInstantiator() {}
}
public static <T> T instantiateClass(Class<T> clazz) { public static <T> T instantiateClass(Class<T> clazz) {
try { try {
return clazz.newInstance(); return clazz.newInstance();
} catch (InstantiationException | IllegalAccessException e) { } catch (InstantiationException | IllegalAccessException e) {
throw new HelenusMappingException("invalid class " + clazz, e); throw new HelenusMappingException("invalid class " + clazz, e);
} }
} }
} }
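One caveat worth noting about ReflectionInstantiator: Class.newInstance(), used above, was deprecated in Java 9 because it rethrows any checked exception the constructor throws without wrapping it. A sketch of the usual replacement, should the project move past Java 8 (Instantiators is a made-up name; Helenus would presumably keep wrapping in HelenusMappingException):

import java.lang.reflect.InvocationTargetException;

public final class Instantiators {
  private Instantiators() {}

  public static <T> T instantiate(Class<T> clazz) {
    try {
      // Goes through the no-arg constructor explicitly, so constructor
      // exceptions arrive wrapped in InvocationTargetException.
      return clazz.getDeclaredConstructor().newInstance();
    } catch (InstantiationException
        | IllegalAccessException
        | NoSuchMethodException
        | InvocationTargetException e) {
      throw new IllegalArgumentException("invalid class " + clazz, e);
    }
  }
}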


@ -18,19 +18,20 @@ package net.helenus.core.reflect;
import java.io.Serializable; import java.io.Serializable;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.Map; import java.util.Map;
import net.helenus.core.MapperInstantiator; import net.helenus.core.MapperInstantiator;
public enum ReflectionMapperInstantiator implements MapperInstantiator { public enum ReflectionMapperInstantiator implements MapperInstantiator {
INSTANCE; INSTANCE;
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) { public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src); MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class, Serializable.class}, E proxy =
handler); (E)
return proxy; Proxy.newProxyInstance(
} classLoader, new Class[] {iface, MapExportable.class, Serializable.class}, handler);
return proxy;
}
} }


@ -18,104 +18,103 @@ package net.helenus.core.reflect;
import java.util.Collection; import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
import java.util.Set; import java.util.Set;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public final class SetDsl<V> implements Set<V> { public final class SetDsl<V> implements Set<V> {
private final HelenusPropertyNode parent; private final HelenusPropertyNode parent;
public SetDsl(HelenusPropertyNode parent) { public SetDsl(HelenusPropertyNode parent) {
this.parent = parent; this.parent = parent;
} }
public HelenusPropertyNode getParent() { public HelenusPropertyNode getParent() {
return parent; return parent;
} }
@Override @Override
public int size() { public int size() {
throwShouldNeverCall(); throwShouldNeverCall();
return 0; return 0;
} }
@Override @Override
public boolean isEmpty() { public boolean isEmpty() {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean contains(Object o) { public boolean contains(Object o) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public Iterator<V> iterator() { public Iterator<V> iterator() {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public Object[] toArray() { public Object[] toArray() {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public <T> T[] toArray(T[] a) { public <T> T[] toArray(T[] a) {
throwShouldNeverCall(); throwShouldNeverCall();
return null; return null;
} }
@Override @Override
public boolean add(V e) { public boolean add(V e) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean remove(Object o) { public boolean remove(Object o) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean containsAll(Collection<?> c) { public boolean containsAll(Collection<?> c) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean addAll(Collection<? extends V> c) { public boolean addAll(Collection<? extends V> c) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean retainAll(Collection<?> c) { public boolean retainAll(Collection<?> c) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public boolean removeAll(Collection<?> c) { public boolean removeAll(Collection<?> c) {
throwShouldNeverCall(); throwShouldNeverCall();
return false; return false;
} }
@Override @Override
public void clear() { public void clear() {
throwShouldNeverCall(); throwShouldNeverCall();
} }
private void throwShouldNeverCall() { private void throwShouldNeverCall() {
throw new HelenusMappingException("should never be called"); throw new HelenusMappingException("should never be called");
} }
@Override @Override
public String toString() { public String toString() {
return "SetDsl"; return "SetDsl";
} }
} }


@ -16,7 +16,6 @@
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import net.helenus.mapping.annotation.ClusteringColumn; import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column; import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey; import net.helenus.mapping.annotation.PartitionKey;
@ -25,91 +24,99 @@ import net.helenus.support.HelenusMappingException;
public final class ColumnInformation { public final class ColumnInformation {
private final IdentityName columnName; private final IdentityName columnName;
private final ColumnType columnType; private final ColumnType columnType;
private final int ordinal; private final int ordinal;
private final OrderingDirection ordering; private final OrderingDirection ordering;
public ColumnInformation(Method getter) { public ColumnInformation(Method getter) {
String columnName = null; String columnName = null;
boolean forceQuote = false; boolean forceQuote = false;
ColumnType columnTypeLocal = ColumnType.COLUMN; ColumnType columnTypeLocal = ColumnType.COLUMN;
int ordinalLocal = 0; int ordinalLocal = 0;
OrderingDirection orderingLocal = OrderingDirection.ASC; OrderingDirection orderingLocal = OrderingDirection.ASC;
PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class); PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class);
if (partitionKey != null) { if (partitionKey != null) {
columnName = partitionKey.value(); columnName = partitionKey.value();
forceQuote = partitionKey.forceQuote(); forceQuote = partitionKey.forceQuote();
columnTypeLocal = ColumnType.PARTITION_KEY; columnTypeLocal = ColumnType.PARTITION_KEY;
ordinalLocal = partitionKey.ordinal(); ordinalLocal = partitionKey.ordinal();
} }
ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class); ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class);
if (clusteringColumn != null) { if (clusteringColumn != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = clusteringColumn.value(); columnName = clusteringColumn.value();
forceQuote = clusteringColumn.forceQuote(); forceQuote = clusteringColumn.forceQuote();
columnTypeLocal = ColumnType.CLUSTERING_COLUMN; columnTypeLocal = ColumnType.CLUSTERING_COLUMN;
ordinalLocal = clusteringColumn.ordinal(); ordinalLocal = clusteringColumn.ordinal();
orderingLocal = clusteringColumn.ordering(); orderingLocal = clusteringColumn.ordering();
} }
StaticColumn staticColumn = getter.getDeclaredAnnotation(StaticColumn.class); StaticColumn staticColumn = getter.getDeclaredAnnotation(StaticColumn.class);
if (staticColumn != null) { if (staticColumn != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = staticColumn.value(); columnName = staticColumn.value();
forceQuote = staticColumn.forceQuote(); forceQuote = staticColumn.forceQuote();
columnTypeLocal = ColumnType.STATIC_COLUMN; columnTypeLocal = ColumnType.STATIC_COLUMN;
ordinalLocal = staticColumn.ordinal(); ordinalLocal = staticColumn.ordinal();
} }
Column column = getter.getDeclaredAnnotation(Column.class); Column column = getter.getDeclaredAnnotation(Column.class);
if (column != null) { if (column != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = column.value(); columnName = column.value();
forceQuote = column.forceQuote(); forceQuote = column.forceQuote();
columnTypeLocal = ColumnType.COLUMN; columnTypeLocal = ColumnType.COLUMN;
ordinalLocal = column.ordinal(); ordinalLocal = column.ordinal();
} }
if (columnName == null || columnName.isEmpty()) { if (columnName == null || columnName.isEmpty()) {
columnName = MappingUtil.getDefaultColumnName(getter); columnName = MappingUtil.getDefaultColumnName(getter);
} }
this.columnName = new IdentityName(columnName, forceQuote); this.columnName = new IdentityName(columnName, forceQuote);
this.columnType = columnTypeLocal; this.columnType = columnTypeLocal;
this.ordinal = ordinalLocal; this.ordinal = ordinalLocal;
this.ordering = orderingLocal; this.ordering = orderingLocal;
} }
public IdentityName getColumnName() { public IdentityName getColumnName() {
return columnName; return columnName;
} }
public ColumnType getColumnType() { public ColumnType getColumnType() {
return columnType; return columnType;
} }
public int getOrdinal() { public int getOrdinal() {
return ordinal; return ordinal;
} }
public OrderingDirection getOrdering() { public OrderingDirection getOrdering() {
return ordering; return ordering;
} }
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) { private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
if (columnTypeLocal != ColumnType.COLUMN) { if (columnTypeLocal != ColumnType.COLUMN) {
throw new HelenusMappingException("property can be annotated only by a single column type " + getter); throw new HelenusMappingException(
} "property can be annotated only by a single column type " + getter);
} }
}
@Override @Override
public String toString() { public String toString() {
return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal return "ColumnInformation [columnName="
+ ", ordering=" + ordering + "]"; + columnName
} + ", columnType="
+ columnType
+ ", ordinal="
+ ordinal
+ ", ordering="
+ ordering
+ "]";
}
} }


@ -16,5 +16,8 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum ColumnType { public enum ColumnType {
PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN; PARTITION_KEY,
CLUSTERING_COLUMN,
STATIC_COLUMN,
COLUMN;
} }


@ -17,22 +17,21 @@ package net.helenus.mapping;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
public interface HelenusEntity { public interface HelenusEntity {
HelenusEntityType getType(); HelenusEntityType getType();
boolean isCacheable(); boolean isCacheable();
Class<?> getMappingInterface(); Class<?> getMappingInterface();
IdentityName getName(); IdentityName getName();
Collection<HelenusProperty> getOrderedProperties(); Collection<HelenusProperty> getOrderedProperties();
HelenusProperty getProperty(String name); HelenusProperty getProperty(String name);
List<Facet> getFacets(); List<Facet> getFacets();
} }


@ -16,5 +16,8 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum HelenusEntityType { public enum HelenusEntityType {
TABLE, VIEW, TUPLE, UDT; TABLE,
VIEW,
TUPLE,
UDT;
} }


@ -15,18 +15,13 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.reflect.Method;
import java.util.*;
import javax.validation.ConstraintValidator;
import org.apache.commons.lang3.ClassUtils;
import com.datastax.driver.core.DefaultMetadata; import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata; import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Method;
import java.util.*;
import javax.validation.ConstraintValidator;
import net.helenus.config.HelenusSettings; import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable; import net.helenus.core.annotation.Cacheable;
@ -35,278 +30,301 @@ import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*; import net.helenus.mapping.annotation.*;
import net.helenus.mapping.validator.DistinctValidator; import net.helenus.mapping.validator.DistinctValidator;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
import org.apache.commons.lang3.ClassUtils;
public final class HelenusMappingEntity implements HelenusEntity { public final class HelenusMappingEntity implements HelenusEntity {
private final Class<?> iface; private final Class<?> iface;
private final HelenusEntityType type; private final HelenusEntityType type;
private final IdentityName name; private final IdentityName name;
private final boolean cacheable; private final boolean cacheable;
private final ImmutableMap<String, Method> methods; private final ImmutableMap<String, Method> methods;
private final ImmutableMap<String, HelenusProperty> props; private final ImmutableMap<String, HelenusProperty> props;
private final ImmutableList<HelenusProperty> orderedProps; private final ImmutableList<HelenusProperty> orderedProps;
private final List<Facet> facets; private final List<Facet> facets;
public HelenusMappingEntity(Class<?> iface, Metadata metadata) { public HelenusMappingEntity(Class<?> iface, Metadata metadata) {
this(iface, autoDetectType(iface), metadata); this(iface, autoDetectType(iface), metadata);
} }
public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) { public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) {
if (iface == null || !iface.isInterface()) { if (iface == null || !iface.isInterface()) {
throw new IllegalArgumentException("invalid parameter " + iface); throw new IllegalArgumentException("invalid parameter " + iface);
} }
this.iface = iface; this.iface = iface;
this.type = Objects.requireNonNull(type, "type is empty"); this.type = Objects.requireNonNull(type, "type is empty");
this.name = resolveName(iface, type); this.name = resolveName(iface, type);
HelenusSettings settings = Helenus.settings(); HelenusSettings settings = Helenus.settings();
Map<String, Method> methods = new HashMap<String, Method>(); Map<String, Method> methods = new HashMap<String, Method>();
for (Method m : iface.getDeclaredMethods()) { for (Method m : iface.getDeclaredMethods()) {
methods.put(m.getName(), m); methods.put(m.getName(), m);
} }
for (Class<?> c : ClassUtils.getAllInterfaces(iface)) { for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) { if (c.getDeclaredAnnotation(Table.class) != null
for (Method m : c.getDeclaredMethods()) { || c.getDeclaredAnnotation(InheritedTable.class) != null) {
Method o = methods.get(m.getName()); for (Method m : c.getDeclaredMethods()) {
if (o != null) { Method o = methods.get(m.getName());
// Prefer overridden method implementation. if (o != null) {
if (o.getDeclaringClass().isAssignableFrom(m.getDeclaringClass())) { // Prefer overridden method implementation.
methods.put(m.getName(), m); if (o.getDeclaringClass().isAssignableFrom(m.getDeclaringClass())) {
} methods.put(m.getName(), m);
} else { }
methods.put(m.getName(), m); } else {
} methods.put(m.getName(), m);
} }
} }
} }
}
List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>(); List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>();
ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder(); ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder();
ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder(); ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder();
for (Method method : methods.values()) { for (Method method : methods.values()) {
if (settings.getGetterMethodDetector().apply(method)) { if (settings.getGetterMethodDetector().apply(method)) {
methodsBuilder.put(method.getName(), method); methodsBuilder.put(method.getName(), method);
if (metadata != null) { if (metadata != null) {
HelenusProperty prop = new HelenusMappingProperty(this, method, metadata); HelenusProperty prop = new HelenusMappingProperty(this, method, metadata);
propsBuilder.put(prop.getPropertyName(), prop); propsBuilder.put(prop.getPropertyName(), prop);
propsLocal.add(prop); propsLocal.add(prop);
} }
} }
} }
this.methods = methodsBuilder.build(); this.methods = methodsBuilder.build();
this.props = propsBuilder.build(); this.props = propsBuilder.build();
Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE); Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE);
this.orderedProps = ImmutableList.copyOf(propsLocal); this.orderedProps = ImmutableList.copyOf(propsLocal);
validateOrdinals(); validateOrdinals();
// Caching // Caching
cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class)); cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));
List<HelenusProperty> primaryKeyProperties = new ArrayList<>(); List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder(); ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
facetsBuilder.add(new Facet("table", name.toCql()).setFixed()); facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
for (HelenusProperty prop : orderedProps) { for (HelenusProperty prop : orderedProps) {
switch (prop.getColumnType()) { switch (prop.getColumnType()) {
case PARTITION_KEY : case PARTITION_KEY:
case CLUSTERING_COLUMN : case CLUSTERING_COLUMN:
primaryKeyProperties.add(prop); primaryKeyProperties.add(prop);
break; break;
default : default:
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) { if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties)); facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
primaryKeyProperties = null; primaryKeyProperties = null;
} }
for (ConstraintValidator<?, ?> constraint : MappingUtil.getValidators(prop.getGetterMethod())) { for (ConstraintValidator<?, ?> constraint :
if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) { MappingUtil.getValidators(prop.getGetterMethod())) {
UnboundFacet facet = new UnboundFacet(prop); if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) {
facetsBuilder.add(facet); UnboundFacet facet = new UnboundFacet(prop);
break; facetsBuilder.add(facet);
} break;
} }
} }
} }
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) { }
facetsBuilder.add(new UnboundFacet(primaryKeyProperties)); if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
} facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
this.facets = facetsBuilder.build(); }
} this.facets = facetsBuilder.build();
}
private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) { private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
switch (type) { switch (type) {
case TABLE : case TABLE:
return MappingUtil.getTableName(iface, true); return MappingUtil.getTableName(iface, true);
case VIEW : case VIEW:
return MappingUtil.getViewName(iface, true); return MappingUtil.getViewName(iface, true);
case TUPLE : case TUPLE:
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false); return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
case UDT : case UDT:
return MappingUtil.getUserDefinedTypeName(iface, true); return MappingUtil.getUserDefinedTypeName(iface, true);
} }
throw new HelenusMappingException("invalid entity type " + type + " in " + iface); throw new HelenusMappingException("invalid entity type " + type + " in " + iface);
} }
private static HelenusEntityType autoDetectType(Class<?> iface) { private static HelenusEntityType autoDetectType(Class<?> iface) {
Objects.requireNonNull(iface, "empty iface"); Objects.requireNonNull(iface, "empty iface");
if (null != iface.getDeclaredAnnotation(Table.class)) { if (null != iface.getDeclaredAnnotation(Table.class)) {
return HelenusEntityType.TABLE; return HelenusEntityType.TABLE;
} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) { } else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
return HelenusEntityType.VIEW; return HelenusEntityType.VIEW;
} else if (null != iface.getDeclaredAnnotation(Tuple.class)) { } else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE; return HelenusEntityType.TUPLE;
} else if (null != iface.getDeclaredAnnotation(UDT.class)) { } else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT; return HelenusEntityType.UDT;
} }
throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface); throw new HelenusMappingException(
} "entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
}
@Override @Override
public HelenusEntityType getType() { public HelenusEntityType getType() {
return type; return type;
} }
@Override @Override
public boolean isCacheable() { public boolean isCacheable() {
return cacheable; return cacheable;
} }
@Override @Override
public Class<?> getMappingInterface() { public Class<?> getMappingInterface() {
return iface; return iface;
} }
@Override @Override
public Collection<HelenusProperty> getOrderedProperties() { public Collection<HelenusProperty> getOrderedProperties() {
return orderedProps; return orderedProps;
} }
@Override @Override
public HelenusProperty getProperty(String name) { public HelenusProperty getProperty(String name) {
HelenusProperty property = props.get(name); HelenusProperty property = props.get(name);
if (property == null && methods.containsKey(name)) { if (property == null && methods.containsKey(name)) {
property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata()); property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
return property; // TODO(gburd): review adding these into the props map... return property; // TODO(gburd): review adding these into the props map...
} }
return props.get(name); return props.get(name);
} }
@Override @Override
public List<Facet> getFacets() { public List<Facet> getFacets() {
return facets; return facets;
} }
@Override @Override
public IdentityName getName() { public IdentityName getName() {
return name; return name;
} }
private void validateOrdinals() { private void validateOrdinals() {
switch (getType()) { switch (getType()) {
case TABLE : case TABLE:
validateOrdinalsForTable(); validateOrdinalsForTable();
break; break;
case TUPLE : case TUPLE:
validateOrdinalsInTuple(); validateOrdinalsInTuple();
break; break;
default : default:
break; break;
} }
} }
private void validateOrdinalsForTable() { private void validateOrdinalsForTable() {
BitSet partitionKeys = new BitSet(); BitSet partitionKeys = new BitSet();
BitSet clusteringColumns = new BitSet(); BitSet clusteringColumns = new BitSet();
for (HelenusProperty prop : getOrderedProperties()) { for (HelenusProperty prop : getOrderedProperties()) {
ColumnType type = prop.getColumnType(); ColumnType type = prop.getColumnType();
int ordinal = prop.getOrdinal(); int ordinal = prop.getOrdinal();
switch (type) { switch (type) {
case PARTITION_KEY : case PARTITION_KEY:
if (partitionKeys.get(ordinal)) { if (partitionKeys.get(ordinal)) {
throw new HelenusMappingException( throw new HelenusMappingException(
"detected two or more partition key columns with the same ordinal " + ordinal + " in " "detected two or more partition key columns with the same ordinal "
+ prop.getEntity()); + ordinal
} + " in "
partitionKeys.set(ordinal); + prop.getEntity());
break; }
partitionKeys.set(ordinal);
break;
case CLUSTERING_COLUMN : case CLUSTERING_COLUMN:
if (clusteringColumns.get(ordinal)) { if (clusteringColumns.get(ordinal)) {
throw new HelenusMappingException("detected two or more clustering columns with the same ordinal " throw new HelenusMappingException(
+ ordinal + " in " + prop.getEntity()); "detected two or more clustering columns with the same ordinal "
} + ordinal
clusteringColumns.set(ordinal); + " in "
break; + prop.getEntity());
}
clusteringColumns.set(ordinal);
break;
default : default:
break; break;
} }
} }
} }
private void validateOrdinalsInTuple() { private void validateOrdinalsInTuple() {
boolean[] ordinals = new boolean[props.size()]; boolean[] ordinals = new boolean[props.size()];
getOrderedProperties().forEach(p -> { getOrderedProperties()
int ordinal = p.getOrdinal(); .forEach(
p -> {
int ordinal = p.getOrdinal();
if (ordinal < 0 || ordinal >= ordinals.length) { if (ordinal < 0 || ordinal >= ordinals.length) {
throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property " throw new HelenusMappingException(
+ p.getPropertyName() + " in " + p.getEntity()); "invalid ordinal "
} + ordinal
+ " found for property "
+ p.getPropertyName()
+ " in "
+ p.getEntity());
}
if (ordinals[ordinal]) { if (ordinals[ordinal]) {
throw new HelenusMappingException( throw new HelenusMappingException(
"detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity()); "detected two or more properties with the same ordinal "
} + ordinal
+ " in "
+ p.getEntity());
}
ordinals[ordinal] = true; ordinals[ordinal] = true;
}); });
for (int i = 0; i != ordinals.length; ++i) { for (int i = 0; i != ordinals.length; ++i) {
if (!ordinals[i]) { if (!ordinals[i]) {
throw new HelenusMappingException("detected absent ordinal " + i + " in " + this); throw new HelenusMappingException("detected absent ordinal " + i + " in " + this);
} }
} }
} }
@Override @Override
public String toString() { public String toString() {
StringBuilder str = new StringBuilder(); StringBuilder str = new StringBuilder();
str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ") str.append(iface.getSimpleName())
.append(type.name().toLowerCase()).append(":\n"); .append("(")
.append(name.getName())
.append(") ")
.append(type.name().toLowerCase())
.append(":\n");
for (HelenusProperty prop : getOrderedProperties()) { for (HelenusProperty prop : getOrderedProperties()) {
str.append(prop.toString()); str.append(prop.toString());
str.append("\n"); str.append("\n");
} }
return str.toString(); return str.toString();
} }
} }
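validateOrdinalsForTable() above relies on BitSet membership tests to catch duplicate partition-key and clustering-column ordinals. The same check, reduced to a free-standing sketch over plain integers (OrdinalCheck is an illustrative name, not a Helenus class):

import java.util.BitSet;
import java.util.List;

final class OrdinalCheck {
  // Throws if any ordinal appears twice; mirrors the per-column-type BitSets
  // used by validateOrdinalsForTable().
  static void ensureDistinct(List<Integer> ordinals) {
    BitSet seen = new BitSet();
    for (int ordinal : ordinals) {
      if (seen.get(ordinal)) {
        throw new IllegalStateException(
            "detected two or more columns with the same ordinal " + ordinal);
      }
      seen.set(ordinal);
    }
  }
}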


@ -15,16 +15,13 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import com.datastax.driver.core.Metadata;
import java.lang.annotation.Annotation; import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Type; import java.lang.reflect.Type;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import com.datastax.driver.core.Metadata;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType; import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes; import net.helenus.mapping.javatype.MappingJavaTypes;
@ -32,171 +29,172 @@ import net.helenus.mapping.type.AbstractDataType;
public final class HelenusMappingProperty implements HelenusProperty { public final class HelenusMappingProperty implements HelenusProperty {
private final HelenusEntity entity; private final HelenusEntity entity;
private final Method getter; private final Method getter;
private final String propertyName; private final String propertyName;
private final Optional<IdentityName> indexName; private final Optional<IdentityName> indexName;
private final boolean caseSensitiveIndex; private final boolean caseSensitiveIndex;
private final ColumnInformation columnInfo; private final ColumnInformation columnInfo;
private final Type genericJavaType; private final Type genericJavaType;
private final Class<?> javaType; private final Class<?> javaType;
private final AbstractJavaType abstractJavaType; private final AbstractJavaType abstractJavaType;
private final AbstractDataType dataType; private final AbstractDataType dataType;
private final ConstraintValidator<? extends Annotation, ?>[] validators; private final ConstraintValidator<? extends Annotation, ?>[] validators;
private volatile Optional<Function<Object, Object>> readConverter = null; private volatile Optional<Function<Object, Object>> readConverter = null;
private volatile Optional<Function<Object, Object>> writeConverter = null; private volatile Optional<Function<Object, Object>> writeConverter = null;
public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) { public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
this.entity = entity; this.entity = entity;
this.getter = getter; this.getter = getter;
this.propertyName = MappingUtil.getPropertyName(getter); this.propertyName = MappingUtil.getPropertyName(getter);
this.indexName = MappingUtil.getIndexName(getter); this.indexName = MappingUtil.getIndexName(getter);
this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter); this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter);
this.columnInfo = new ColumnInformation(getter); this.columnInfo = new ColumnInformation(getter);
this.genericJavaType = getter.getGenericReturnType(); this.genericJavaType = getter.getGenericReturnType();
this.javaType = getter.getReturnType(); this.javaType = getter.getReturnType();
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType); this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType, this.dataType =
this.columnInfo.getColumnType(), metadata); abstractJavaType.resolveDataType(
this.getter, this.genericJavaType, this.columnInfo.getColumnType(), metadata);
this.validators = MappingUtil.getValidators(getter); this.validators = MappingUtil.getValidators(getter);
} }
@Override @Override
public HelenusEntity getEntity() { public HelenusEntity getEntity() {
return entity; return entity;
} }
@Override @Override
public Class<?> getJavaType() { public Class<?> getJavaType() {
return (Class<?>) javaType; return (Class<?>) javaType;
} }
@Override @Override
public AbstractDataType getDataType() { public AbstractDataType getDataType() {
return dataType; return dataType;
} }
@Override @Override
public ColumnType getColumnType() { public ColumnType getColumnType() {
return columnInfo.getColumnType(); return columnInfo.getColumnType();
} }
@Override @Override
public int getOrdinal() { public int getOrdinal() {
return columnInfo.getOrdinal(); return columnInfo.getOrdinal();
} }
@Override @Override
public OrderingDirection getOrdering() { public OrderingDirection getOrdering() {
return columnInfo.getOrdering(); return columnInfo.getOrdering();
} }
@Override @Override
public IdentityName getColumnName() { public IdentityName getColumnName() {
return columnInfo.getColumnName(); return columnInfo.getColumnName();
} }
@Override @Override
public Optional<IdentityName> getIndexName() { public Optional<IdentityName> getIndexName() {
return indexName; return indexName;
} }
@Override @Override
public boolean caseSensitiveIndex() { public boolean caseSensitiveIndex() {
return caseSensitiveIndex; return caseSensitiveIndex;
} }
@Override @Override
public String getPropertyName() { public String getPropertyName() {
return propertyName; return propertyName;
} }
@Override @Override
public Method getGetterMethod() { public Method getGetterMethod() {
return getter; return getter;
} }
@Override @Override
public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) { public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
if (readConverter == null) { if (readConverter == null) {
readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository); readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository);
} }
return readConverter; return readConverter;
} }
@Override @Override
public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) { public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
if (writeConverter == null) { if (writeConverter == null) {
writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository); writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository);
} }
return writeConverter; return writeConverter;
} }
@Override @Override
public ConstraintValidator<? extends Annotation, ?>[] getValidators() { public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
return validators; return validators;
} }
@Override @Override
public String toString() { public String toString() {
StringBuilder str = new StringBuilder(); StringBuilder str = new StringBuilder();
String columnName = this.getColumnName().getName(); String columnName = this.getColumnName().getName();
str.append(" "); str.append(" ");
str.append(this.getDataType()); str.append(this.getDataType());
str.append(" "); str.append(" ");
str.append(this.getPropertyName()); str.append(this.getPropertyName());
str.append("("); str.append("(");
if (!columnName.equals(this.getPropertyName())) { if (!columnName.equals(this.getPropertyName())) {
str.append(columnName); str.append(columnName);
} }
str.append(") "); str.append(") ");
ColumnType type = this.getColumnType(); ColumnType type = this.getColumnType();
switch (type) { switch (type) {
case PARTITION_KEY : case PARTITION_KEY:
str.append("partition_key["); str.append("partition_key[");
str.append(this.getOrdinal()); str.append(this.getOrdinal());
str.append("] "); str.append("] ");
break; break;
case CLUSTERING_COLUMN : case CLUSTERING_COLUMN:
str.append("clustering_column["); str.append("clustering_column[");
str.append(this.getOrdinal()); str.append(this.getOrdinal());
str.append("] "); str.append("] ");
OrderingDirection od = this.getOrdering(); OrderingDirection od = this.getOrdering();
if (od != null) { if (od != null) {
str.append(od.name().toLowerCase()).append(" "); str.append(od.name().toLowerCase()).append(" ");
} }
break; break;
case STATIC_COLUMN : case STATIC_COLUMN:
str.append("static "); str.append("static ");
break; break;
case COLUMN : case COLUMN:
break; break;
} }
Optional<IdentityName> idx = this.getIndexName(); Optional<IdentityName> idx = this.getIndexName();
if (idx.isPresent()) { if (idx.isPresent()) {
str.append("index(").append(idx.get().getName()).append(") "); str.append("index(").append(idx.get().getName()).append(") ");
} }
return str.toString(); return str.toString();
} }
} }


@ -19,39 +19,37 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType; import net.helenus.mapping.type.AbstractDataType;
public interface HelenusProperty { public interface HelenusProperty {
HelenusEntity getEntity(); HelenusEntity getEntity();
String getPropertyName(); String getPropertyName();
Method getGetterMethod(); Method getGetterMethod();
IdentityName getColumnName(); IdentityName getColumnName();
Optional<IdentityName> getIndexName(); Optional<IdentityName> getIndexName();
boolean caseSensitiveIndex(); boolean caseSensitiveIndex();
Class<?> getJavaType(); Class<?> getJavaType();
AbstractDataType getDataType(); AbstractDataType getDataType();
ColumnType getColumnType(); ColumnType getColumnType();
int getOrdinal(); int getOrdinal();
OrderingDirection getOrdering(); OrderingDirection getOrdering();
Optional<Function<Object, Object>> getReadConverter(SessionRepository repository); Optional<Function<Object, Object>> getReadConverter(SessionRepository repository);
Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository); Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository);
ConstraintValidator<? extends Annotation, ?>[] getValidators(); ConstraintValidator<? extends Annotation, ?>[] getValidators();
} }


@ -19,41 +19,41 @@ import net.helenus.support.CqlUtil;
public final class IdentityName { public final class IdentityName {
private final String name; private final String name;
private final boolean forceQuote; private final boolean forceQuote;
public IdentityName(String name, boolean forceQuote) { public IdentityName(String name, boolean forceQuote) {
this.name = name.toLowerCase(); this.name = name.toLowerCase();
this.forceQuote = forceQuote; this.forceQuote = forceQuote;
} }
public static IdentityName of(String name, boolean forceQuote) { public static IdentityName of(String name, boolean forceQuote) {
return new IdentityName(name, forceQuote); return new IdentityName(name, forceQuote);
} }
public String getName() { public String getName() {
return name; return name;
} }
public boolean isForceQuote() { public boolean isForceQuote() {
return forceQuote; return forceQuote;
} }
public String toCql(boolean overrideForceQuote) { public String toCql(boolean overrideForceQuote) {
if (overrideForceQuote) { if (overrideForceQuote) {
return CqlUtil.forceQuote(name); return CqlUtil.forceQuote(name);
} else { } else {
return name; return name;
} }
} }
public String toCql() { public String toCql() {
return toCql(forceQuote); return toCql(forceQuote);
} }
@Override @Override
public String toString() { public String toString() {
return toCql(); return toCql();
} }
} }


@ -21,10 +21,8 @@ import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import javax.validation.Constraint; import javax.validation.Constraint;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.reflect.*; import net.helenus.core.reflect.*;
@ -34,290 +32,292 @@ import net.helenus.support.HelenusMappingException;
public final class MappingUtil { public final class MappingUtil {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0]; public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS =
new ConstraintValidator[0];
private MappingUtil() { private MappingUtil() {}
}
public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) { public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
List<ConstraintValidator<? extends Annotation, ?>> list = null; List<ConstraintValidator<? extends Annotation, ?>> list = null;
for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) { for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) {
list = addValidators(constraintAnnotation, list); list = addValidators(constraintAnnotation, list);
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType(); Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) { for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
list = addValidators(possibleConstraint, list); list = addValidators(possibleConstraint, list);
} }
} }
if (list == null) { if (list == null) {
return EMPTY_VALIDATORS; return EMPTY_VALIDATORS;
} else { } else {
return list.toArray(EMPTY_VALIDATORS); return list.toArray(EMPTY_VALIDATORS);
} }
} }
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation, private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(
List<ConstraintValidator<? extends Annotation, ?>> list) { Annotation constraintAnnotation, List<ConstraintValidator<? extends Annotation, ?>> list) {
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType(); Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) { for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
if (possibleConstraint instanceof Constraint) { if (possibleConstraint instanceof Constraint) {
Constraint constraint = (Constraint) possibleConstraint; Constraint constraint = (Constraint) possibleConstraint;
for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) { for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {
ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator ConstraintValidator<? extends Annotation, ?> validator =
.instantiateClass(clazz); ReflectionInstantiator.instantiateClass(clazz);
((ConstraintValidator) validator).initialize(constraintAnnotation); ((ConstraintValidator) validator).initialize(constraintAnnotation);
if (list == null) { if (list == null) {
list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>(); list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>();
} }
list.add(validator); list.add(validator);
} }
} }
} }
return list; return list;
} }
public static Optional<IdentityName> getIndexName(Method getterMethod) { public static Optional<IdentityName> getIndexName(Method getterMethod) {
String indexName = null; String indexName = null;
boolean forceQuote = false; boolean forceQuote = false;
Index index = getterMethod.getDeclaredAnnotation(Index.class); Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) { if (index != null) {
indexName = index.value(); indexName = index.value();
forceQuote = index.forceQuote(); forceQuote = index.forceQuote();
if (indexName == null || indexName.isEmpty()) { if (indexName == null || indexName.isEmpty()) {
indexName = getDefaultColumnName(getterMethod); indexName = getDefaultColumnName(getterMethod);
} }
} }
return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty(); return indexName != null
} ? Optional.of(new IdentityName(indexName, forceQuote))
: Optional.empty();
}
public static boolean caseSensitiveIndex(Method getterMethod) { public static boolean caseSensitiveIndex(Method getterMethod) {
Index index = getterMethod.getDeclaredAnnotation(Index.class); Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) { if (index != null) {
return index.caseSensitive(); return index.caseSensitive();
} }
return false; return false;
} }
public static String getPropertyName(Method getter) { public static String getPropertyName(Method getter) {
return getter.getName(); return getter.getName();
} }
public static String getDefaultColumnName(Method getter) { public static String getDefaultColumnName(Method getter) {
return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter)); return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
} }
  public static IdentityName getUserDefinedTypeName(Class<?> iface, boolean required) {
    String userTypeName = null;
    boolean forceQuote = false;
    UDT userDefinedType = iface.getDeclaredAnnotation(UDT.class);
    if (userDefinedType != null) {
      userTypeName = userDefinedType.value();
      forceQuote = userDefinedType.forceQuote();
      if (userTypeName == null || userTypeName.isEmpty()) {
        userTypeName = getDefaultEntityName(iface);
      }
      return new IdentityName(userTypeName, forceQuote);
    }
    if (required) {
      throw new HelenusMappingException("entity must have annotation @UserDefinedType " + iface);
    }
    return null;
  }

  public static boolean isTuple(Class<?> iface) {
    Tuple tuple = iface.getDeclaredAnnotation(Tuple.class);
    return tuple != null;
  }

  public static boolean isUDT(Class<?> iface) {
    UDT udt = iface.getDeclaredAnnotation(UDT.class);
    return udt != null;
  }
  public static IdentityName getViewName(Class<?> iface, boolean required) {
    String viewName = null;
    boolean forceQuote = false;
    MaterializedView view = iface.getDeclaredAnnotation(MaterializedView.class);
    if (view != null) {
      viewName = view.value();
      forceQuote = view.forceQuote();
    } else if (required) {
      throw new HelenusMappingException("entity must have annotation @MaterializedView " + iface);
    }
    if (viewName == null || viewName.isEmpty()) {
      viewName = getDefaultEntityName(iface);
    }
    return new IdentityName(viewName, forceQuote);
  }

  public static IdentityName getTableName(Class<?> iface, boolean required) {
    String tableName = null;
    boolean forceQuote = false;
    Table table = iface.getDeclaredAnnotation(Table.class);
    if (table != null) {
      tableName = table.value();
      forceQuote = table.forceQuote();
    } else if (required) {
      throw new HelenusMappingException("entity must have annotation @Table " + iface);
    }
    if (tableName == null || tableName.isEmpty()) {
      tableName = getDefaultEntityName(iface);
    }
    return new IdentityName(tableName, forceQuote);
  }

  public static String getDefaultEntityName(Class<?> iface) {
    return Helenus.settings().getPropertyToColumnConverter().apply(iface.getSimpleName());
  }
  public static Class<?> getMappingInterface(Object pojo) {
    Class<?> iface = null;
    if (pojo instanceof Class) {
      iface = (Class<?>) pojo;
      if (!iface.isInterface()) {
        throw new HelenusMappingException("expected interface " + iface);
      }
    } else {
      Class<?>[] ifaces = pojo.getClass().getInterfaces();
      int len = ifaces.length;
      for (int i = 0; i != len; ++i) {
        iface = ifaces[i];
        if (MapExportable.class.isAssignableFrom(iface)) {
          continue;
        }
        if (iface.getDeclaredAnnotation(Table.class) != null
            || iface.getDeclaredAnnotation(MaterializedView.class) != null
            || iface.getDeclaredAnnotation(UDT.class) != null
            || iface.getDeclaredAnnotation(Tuple.class) != null) {
          break;
        }
      }
    }
    if (iface == null) {
      throw new HelenusMappingException("dsl interface not found for " + pojo);
    }
    return iface;
  }
  public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) {
    try {
      Object childDsl = getter.get();
      if (childDsl instanceof DslExportable) {
        DslExportable e = (DslExportable) childDsl;
        return e.getParentDslHelenusPropertyNode();
      } else if (childDsl instanceof MapDsl) {
        MapDsl mapDsl = (MapDsl) childDsl;
        return mapDsl.getParent();
      } else if (childDsl instanceof ListDsl) {
        ListDsl listDsl = (ListDsl) childDsl;
        return listDsl.getParent();
      } else if (childDsl instanceof SetDsl) {
        SetDsl setDsl = (SetDsl) childDsl;
        return setDsl.getParent();
      }
      throw new HelenusMappingException("getter must reference the dsl object " + getter);
    } catch (DslPropertyException e) {
      return e.getPropertyNode();
    }
  }
  // https://stackoverflow.com/a/4882306/366692
  public static <T> T clone(T object) throws CloneNotSupportedException {
    Object clone = null;

    // Use reflection, because there is no other way
    try {
      Method method = object.getClass().getMethod("clone");
      clone = method.invoke(object);
    } catch (InvocationTargetException e) {
      rethrow(e.getCause());
    } catch (Exception cause) {
      rethrow(cause);
    }

    if (object.getClass().isInstance(clone)) {
      @SuppressWarnings("unchecked") // clone class <= object class <= T
      T t = (T) clone;
      return t;
    } else {
      throw new ClassCastException(clone.getClass().getName());
    }
  }

  private static void rethrow(Throwable cause) throws CloneNotSupportedException {
    if (cause instanceof RuntimeException) {
      throw (RuntimeException) cause;
    }
    if (cause instanceof Error) {
      throw (Error) cause;
    }
    if (cause instanceof CloneNotSupportedException) {
      throw (CloneNotSupportedException) cause;
    }
    CloneNotSupportedException e = new CloneNotSupportedException();
    e.initCause(cause);
    throw e;
  }
}
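For orientation only, and not part of this commit: the sketch below shows roughly how the name-resolution helpers above fall back from an explicit annotation value to the property or entity name normalized to underscore. The Account interface, its getters, the "by_email" index name, and the results in the trailing comments are all invented for this illustration; the @Table and @Index packages are assumed to be net.helenus.mapping.annotation, and the enclosing utility class holding these static helpers is not named in this fragment.

import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.Table;

// Hypothetical DSL entity used only to illustrate the helpers above.
@Table
public interface Account {

  @Index("by_email")
  String email();

  String firstName();
}

// Roughly, under these assumptions:
//   getTableName(Account.class, true)             -> IdentityName("account")          // default entity name
//   getIndexName(getter for email())              -> Optional.of(IdentityName("by_email"))
//   getDefaultColumnName(getter for firstName())  -> "first_name"                     // normalized to underscore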

View file

@ -18,28 +18,28 @@ package net.helenus.mapping;
import net.helenus.support.HelenusMappingException;

public enum OrderingDirection {
  ASC("ASC"),
  DESC("DESC");

  private final String cql;

  private OrderingDirection(String cql) {
    this.cql = cql;
  }

  public static OrderingDirection parseString(String name) {
    if (ASC.cql.equalsIgnoreCase(name)) {
      return ASC;
    } else if (DESC.cql.equalsIgnoreCase(name)) {
      return DESC;
    }
    throw new HelenusMappingException("invalid ordering direction name " + name);
  }

  public String cql() {
    return cql;
  }
}

View file

@ -18,16 +18,17 @@ package net.helenus.mapping;
import java.util.Comparator;

public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> {
  INSTANCE;

  public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {
    int c =
        Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
    if (c == 0) {
      c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());
    }
    return c;
  }
}

View file

@ -19,93 +19,78 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.helenus.mapping.OrderingDirection;

/**
 * ClusteringColumn is the family column in the legacy Cassandra API.
 *
 * <p>The purpose of this column is to add an additional dimension to the table. Both @PartitionKey
 * and @ClusteringColumn together form the primary key of the table. The primary difference between
 * them is that the first is used for routing, to locate a data node in the cluster, while the
 * second is used inside the node to locate a piece of data on a concrete machine.
 *
 * <p>A ClusteringColumn can be thought of as a key in a SortedMap that is stored entirely on a
 * single node. Developers must choose clustering column fields carefully, because all data inside
 * this SortedMap must fit on one node.
 *
 * <p>A ClusteringColumn can have more than one part, and the order of the parts is important. This
 * order defines how Cassandra joins the parts and influences data retrieval operations. Each part
 * can have an ordering property that defines the default ascending or descending order of data.
 * With two or more parts, select queries need to use the parts in a consistent order, as they are
 * defined in the table.
 *
 * <p>For example, if the first part has ASC ordering and the second is also ASC, Cassandra will
 * sort entries like this: a-a a-b b-a b-b. In this case we are able to run the queries: ORDER BY
 * first ASC, second ASC; ORDER BY first DESC, second DESC; WHERE first=? ORDER BY second ASC;
 * WHERE first=? ORDER BY second DESC; WHERE first=? AND second=?
 *
 * <p>But we cannot run the queries: ORDER BY first DESC, second ASC; ORDER BY first ASC, second
 * DESC; WHERE second=?; ORDER BY first (ASC,DESC)
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface ClusteringColumn {

  /**
   * Default value is the name of the method normalized to underscore.
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * ClusteringColumn parts must be ordered in the @Table; this is a requirement of Cassandra.
   * Cassandra joins all parts into the final clustering key that is stored in the column family
   * name. Additionally, all parts can have an ordering (ASC, DESC) that, together with the
   * sequence of the parts, determines the key comparison function, so Cassandra always stores
   * column family names in sorted order.
   *
   * <p>By default ordinal has the value 0, because in most cases a @Table has a single
   * ClusteringColumn. If the ClusteringColumn has two or more parts, then you need to use
   * ordinal() to define the sequence of the parts.
   *
   * @return number used to sort clustering columns
   */
  int ordinal() default 0;

  /**
   * Default order of values in the ClusteringColumn. This ordering is used for comparison of the
   * clustering column values when Cassandra stores them in sorted order.
   *
   * <p>Default value is the ascending order.
   *
   * @return ascending or descending order of clustering column values
   */
  OrderingDirection ordering() default OrderingDirection.ASC;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
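A minimal usage sketch may help here; it is not part of this diff. The Timeline interface, its getters, and the "timeline" table name are invented for illustration, and it assumes @Table, @PartitionKey, and @Column live alongside @ClusteringColumn in net.helenus.mapping.annotation. It shows the two properties the Javadoc above discusses: ordinal to order multiple clustering parts, and ordering to pick the default sort direction.

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical time-series entity: rows are partitioned by user and clustered
// by timestamp (newest first), with an id as a tie-breaker.
@Table("timeline")
public interface Timeline {

  @PartitionKey
  UUID userId();

  // First clustering part; DESC so the most recent entries come back first.
  @ClusteringColumn(ordinal = 0, ordering = OrderingDirection.DESC)
  Date timestamp();

  // Second clustering part; breaks ties between entries with equal timestamps.
  @ClusteringColumn(ordinal = 1)
  UUID entryId();

  @Column
  String text();
}

With this layout, queries within a single userId partition can ORDER BY timestamp DESC, entryId DESC, matching the rules listed in the class-level Javadoc above.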

View file

@ -18,51 +18,45 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * Column annotation is used to define additional properties of the column in entity mapping
 * interfaces: @Table, @UDT, @Tuple.
 *
 * <p>Column annotation can be used to override the default name of the column or to set up the
 * order of the columns in the mapping.
 *
 * <p>Usually for @Table and @UDT types it is not important to define the order of the columns, but
 * in a @Tuple mapping it is required, because the tuple itself represents a sequence of types
 * stored, in a particular order, in a single table column.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Column {

  /**
   * Default value is the name of the method normalized to underscore.
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * Ordinal will be used for ascending sorting of columns.
   *
   * <p>Default value is 0, because not all mapping entities require all fields to have unique
   * ordinals; only a @Tuple mapping entity requires all of them to be unique.
   *
   * @return number used to sort columns, usually for @Tuple only
   */
  int ordinal() default 0;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the UDT type needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
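Again as a sketch only, not part of this diff, with invented names and the same package assumptions as above: in a @Tuple mapping the ordinal fixes the position of every component, whereas in a @Table it mostly serves to rename or reorder columns.

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Tuple;

// Hypothetical tuple type: ordinals must be unique because they define the
// position of each component inside the tuple.
@Tuple
public interface AddressTuple {

  @Column(ordinal = 0)
  String street();

  @Column(ordinal = 1)
  String city();

  // value() can still override the default underscore-normalized name,
  // though for tuples it is the ordinal that matters.
  @Column(ordinal = 2, value = "zip_code")
  String zip();
}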

Some files were not shown because too many files have changed in this diff.