Merge branch 'develop'

Greg Burd 2017-10-23 14:48:33 -04:00
commit dcc0927a4a
232 changed files with 11166 additions and 10234 deletions

NOTES

@@ -1,3 +1,8 @@
--- Cache
// `E` is the type of the Entity class or one of:
// - ResultSet
@@ -315,3 +320,55 @@ begin:
  }
};
}
----------------------------------
if ("ttl".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
Getter getter = (Getter) args[0];
if (getter == null) {
return false;
}
HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
String getterName = prop.getPropertyName();
String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
if (src.containsKey(ttlKeyForProperty)) {
return src.get(ttlKeyForProperty);
} else {
return 0;
}
}
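// NOTE: the branch below mirrors the ttl handler above almost verbatim; the key it reads is
// presumably meant to be a write-time marker (e.g. "_writeTime") rather than the "_ttl" key.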
if ("written".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
Getter getter = (Getter) args[0];
if (getter == null) {
return false;
}
HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
String getterName = prop.getPropertyName();
String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
if (src.containsKey(ttlKeyForProperty)) {
return src.get(ttlKeyForProperty);
} else {
return 0;
}
}
-----------------
/*else {
Cache<String, Object> cache = session.getSessionCache();
Map<String, Object> rowMap = this.cache.rowMap();
for (String rowKey : rowMap.keySet()) {
String keys = flattenFacets(facets);
for (String key : keys) {
Object value = cache.getIfPresent(key);
if (value != null) {
result = Optional.of(value);
break;
}
}
}
cache.put
}
*/
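A possible completion of the commented-out lookup above, kept only as a sketch; it assumes
flattenFacets(facets) returns the candidate key strings and that the session cache is a Guava
Cache<String, Object>:

Optional<Object> result = Optional.empty();
Cache<String, Object> cache = session.getSessionCache();
for (String key : flattenFacets(facets)) {   // hypothetical helper from the note above
    Object value = cache.getIfPresent(key);  // first hit wins
    if (value != null) {
        result = Optional.of(value);
        break;
    }
}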


@@ -64,7 +64,6 @@ dependencies {
    compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
    compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
    compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'
    compile group: 'com.google.guava', name: 'guava', version: '20.0'
    compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
    compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'


@@ -5,19 +5,19 @@ import java.util.List;
public class DefaultMetadata extends Metadata {

    public DefaultMetadata() {
        super(null);
    }

    private DefaultMetadata(Cluster.Manager cluster) {
        super(cluster);
    }

    public TupleType newTupleType(DataType... types) {
        return newTupleType(Arrays.asList(types));
    }

    public TupleType newTupleType(List<DataType> types) {
        return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
    }
}


@@ -15,34 +15,35 @@
 */
package com.datastax.driver.core.querybuilder;

import java.util.List;

import com.datastax.driver.core.CodecRegistry;

public class IsNotNullClause extends Clause {

    final String name;

    public IsNotNullClause(String name) {
        this.name = name;
    }

    @Override
    String name() {
        return name;
    }

    @Override
    Object firstValue() {
        return null;
    }

    @Override
    void appendTo(StringBuilder sb, List<Object> variables, CodecRegistry codecRegistry) {
        Utils.appendName(name, sb).append(" IS NOT NULL");
    }

    @Override
    boolean containsBindMarker() {
        return false;
    }
}


@@ -1,157 +1,148 @@
package com.datastax.driver.core.schemabuilder;

import static com.datastax.driver.core.schemabuilder.SchemaStatement.*;

import com.google.common.base.Optional;

public class CreateCustomIndex extends CreateIndex {

    private String indexName;
    private boolean ifNotExists = false;
    private Optional<String> keyspaceName = Optional.absent();
    private String tableName;
    private String columnName;
    private boolean keys;

    CreateCustomIndex(String indexName) {
        super(indexName);
        validateNotEmpty(indexName, "Index name");
        validateNotKeyWord(indexName,
                String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
        this.indexName = indexName;
    }

    /**
     * Add the 'IF NOT EXISTS' condition to this CREATE INDEX statement.
     *
     * @return this CREATE INDEX statement.
     */
    public CreateIndex ifNotExists() {
        this.ifNotExists = true;
        return this;
    }

    /**
     * Specify the keyspace and table to create the index on.
     *
     * @param keyspaceName
     *            the keyspace name.
     * @param tableName
     *            the table name.
     * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
     *         of the column.
     */
    public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
        validateNotEmpty(keyspaceName, "Keyspace name");
        validateNotEmpty(tableName, "Table name");
        validateNotKeyWord(keyspaceName,
                String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
        validateNotKeyWord(tableName,
                String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
        this.keyspaceName = Optional.fromNullable(keyspaceName);
        this.tableName = tableName;
        return new CreateCustomIndex.CreateIndexOn();
    }

    /**
     * Specify the table to create the index on.
     *
     * @param tableName
     *            the table name.
     * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
     *         of the column.
     */
    public CreateIndex.CreateIndexOn onTable(String tableName) {
        validateNotEmpty(tableName, "Table name");
        validateNotKeyWord(tableName,
                String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
        this.tableName = tableName;
        return new CreateCustomIndex.CreateIndexOn();
    }

    String getCustomClassName() {
        return "";
    }

    String getOptions() {
        return "";
    }

    @Override
    public String buildInternal() {
        StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");

        if (ifNotExists) {
            createStatement.append("IF NOT EXISTS ");
        }

        createStatement.append(indexName).append(" ON ");

        if (keyspaceName.isPresent()) {
            createStatement.append(keyspaceName.get()).append(".");
        }
        createStatement.append(tableName);

        createStatement.append("(");
        if (keys) {
            createStatement.append("KEYS(");
        }

        createStatement.append(columnName);

        if (keys) {
            createStatement.append(")");
        }
        createStatement.append(")");

        createStatement.append(" USING '");
        createStatement.append(getCustomClassName());
        createStatement.append("' WITH OPTIONS = {");
        createStatement.append(getOptions());
        createStatement.append(" }");

        return createStatement.toString();
    }

    public class CreateIndexOn extends CreateIndex.CreateIndexOn {
        /**
         * Specify the column to create the index on.
         *
         * @param columnName
         *            the column name.
         * @return the final CREATE INDEX statement.
         */
        public SchemaStatement andColumn(String columnName) {
            validateNotEmpty(columnName, "Column name");
            validateNotKeyWord(columnName,
                    String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
            CreateCustomIndex.this.columnName = columnName;
            return SchemaStatement.fromQueryString(buildInternal());
        }

        /**
         * Create an index on the keys of the given map column.
         *
         * @param columnName
         *            the column name.
         * @return the final CREATE INDEX statement.
         */
        public SchemaStatement andKeysOfColumn(String columnName) {
            validateNotEmpty(columnName, "Column name");
            validateNotKeyWord(columnName,
                    String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
            CreateCustomIndex.this.columnName = columnName;
            CreateCustomIndex.this.keys = true;
            return SchemaStatement.fromQueryString(buildInternal());
        }
    }
}


@@ -5,49 +5,48 @@ import com.datastax.driver.core.querybuilder.Select;
public class CreateMaterializedView extends Create {

    private String viewName;
    private Select.Where selection;
    private String primaryKey;
    private String clustering;

    public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
            String clustering) {
        super(keyspaceName, viewName);
        this.viewName = viewName;
        this.selection = selection;
        this.primaryKey = primaryKey;
        this.clustering = clustering;
    }

    public String getQueryString(CodecRegistry codecRegistry) {
        return buildInternal();
    }

    public String buildInternal() {
        StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
        if (ifNotExists) {
            createStatement.append(" IF NOT EXISTS");
        }
        createStatement.append(" ");
        if (keyspaceName.isPresent()) {
            createStatement.append(keyspaceName.get()).append(".");
        }
        createStatement.append(viewName);
        createStatement.append(" AS ");
        createStatement.append(selection.getQueryString());
        createStatement.setLength(createStatement.length() - 1);
        createStatement.append(" ");
        createStatement.append(primaryKey);
        if (clustering != null) {
            createStatement.append(" ").append(clustering);
        }
        createStatement.append(";");

        return createStatement.toString();
    }

    public String toString() {
        return buildInternal();
    }
}
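For orientation, a sketch of how the pieces compose, reusing the IsNotNullClause added above; the
keyspace, view, and column names are invented, and the trailing comment shows roughly what
buildInternal() assembles:

Select.Where selection = QueryBuilder.select("age", "cid", "name")
        .from("cycling", "cyclist")
        .where(new IsNotNullClause("age"))
        .and(new IsNotNullClause("cid"));
String cql = new CreateMaterializedView("cycling", "cyclist_by_age", selection,
        "PRIMARY KEY (age, cid)", "WITH CLUSTERING ORDER BY (cid ASC)").buildInternal();
// roughly: CREATE MATERIALIZED VIEW cycling.cyclist_by_age AS SELECT age,cid,name FROM cycling.cyclist
//          WHERE age IS NOT NULL AND cid IS NOT NULL PRIMARY KEY (age, cid) WITH CLUSTERING ORDER BY (cid ASC);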


@@ -2,17 +2,16 @@ package com.datastax.driver.core.schemabuilder;
public class CreateSasiIndex extends CreateCustomIndex {

    public CreateSasiIndex(String indexName) {
        super(indexName);
    }

    String getCustomClassName() {
        return "org.apache.cassandra.index.sasi.SASIIndex";
    }

    String getOptions() {
        return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
                + "'case_sensitive': 'false'";
    }
}
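A usage sketch tying this to CreateCustomIndex above (all identifiers invented); the comment shows
the rough shape of the generated statement:

SchemaStatement stmt = new CreateSasiIndex("person_name_idx")
        .ifNotExists()
        .onTable("helenus", "person")
        .andColumn("name");
// roughly: CREATE CUSTOM INDEX IF NOT EXISTS person_name_idx ON helenus.person(name)
//          USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH OPTIONS = {'analyzer_class':
//          'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', 'case_sensitive': 'false' }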


@@ -20,19 +20,19 @@ import com.datastax.driver.core.CodecRegistry;
/** A built CREATE TABLE statement. */
public class CreateTable extends Create {

    public CreateTable(String keyspaceName, String tableName) {
        super(keyspaceName, tableName);
    }

    public CreateTable(String tableName) {
        super(tableName);
    }

    public String getQueryString(CodecRegistry codecRegistry) {
        return buildInternal();
    }

    public String toString() {
        return buildInternal();
    }
}


@@ -4,50 +4,46 @@ import com.google.common.base.Optional;
public class DropMaterializedView extends Drop {

    private final String itemType = "MATERIALIZED VIEW";
    private Optional<String> keyspaceName = Optional.absent();
    private String itemName;
    private boolean ifExists = true;

    public DropMaterializedView(String keyspaceName, String viewName) {
        this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
    }

    private DropMaterializedView(String keyspaceName, String viewName, DroppedItem itemType) {
        super(keyspaceName, viewName, Drop.DroppedItem.TABLE);
        validateNotEmpty(keyspaceName, "Keyspace name");
        this.keyspaceName = Optional.fromNullable(keyspaceName);
        this.itemName = viewName;
    }

    /**
     * Add the 'IF EXISTS' condition to this DROP statement.
     *
     * @return this statement.
     */
    public Drop ifExists() {
        this.ifExists = true;
        return this;
    }

    @Override
    public String buildInternal() {
        StringBuilder dropStatement = new StringBuilder("DROP " + itemType + " ");
        if (ifExists) {
            dropStatement.append("IF EXISTS ");
        }
        if (keyspaceName.isPresent()) {
            dropStatement.append(keyspaceName.get()).append(".");
        }
        dropStatement.append(itemName);
        return dropStatement.toString();
    }

    enum DroppedItem {
        TABLE, TYPE, INDEX, MATERIALIZED_VIEW
    }
}
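A quick illustration (invented names) of the statement this assembles; IF EXISTS appears because
ifExists defaults to true above:

String cql = new DropMaterializedView("cycling", "cyclist_by_age").buildInternal();
// => DROP MATERIALIZED VIEW IF EXISTS cycling.cyclist_by_age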


@@ -17,6 +17,7 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;

import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
import net.helenus.core.reflect.ReflectionDslInstantiator;
@@ -25,23 +26,23 @@ import net.helenus.mapping.convert.CamelCaseToUnderscoreConverter;
public class DefaultHelenusSettings implements HelenusSettings {

    @Override
    public Function<String, String> getPropertyToColumnConverter() {
        return CamelCaseToUnderscoreConverter.INSTANCE;
    }

    @Override
    public Function<Method, Boolean> getGetterMethodDetector() {
        return GetterMethodDetector.INSTANCE;
    }

    @Override
    public DslInstantiator getDslInstantiator() {
        return ReflectionDslInstantiator.INSTANCE;
    }

    @Override
    public MapperInstantiator getMapperInstantiator() {
        return ReflectionMapperInstantiator.INSTANCE;
    }
}


@@ -18,31 +18,32 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.function.Function;

import net.helenus.mapping.annotation.Transient;

public enum GetterMethodDetector implements Function<Method, Boolean> {

    INSTANCE;

    @Override
    public Boolean apply(Method method) {

        if (method == null) {
            throw new IllegalArgumentException("empty parameter");
        }

        if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
            return false;
        }

        if (Modifier.isStatic(method.getModifiers())) {
            return false;
        }

        // Methods marked "Transient" are not mapped, skip them.
        if (method.getDeclaredAnnotation(Transient.class) != null) {
            return false;
        }

        return true;
    }
}
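To make the classification rules concrete, a hypothetical mapped interface and how the detector
would treat each of its methods:

interface Account {
    String alias();                  // 0 args, non-void, non-static, no @Transient -> true (a getter)
    @Transient String scratchpad();  // annotated @Transient -> false (not mapped)
    void touch();                    // void return type -> false
}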


@@ -17,16 +17,17 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;

import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;

public interface HelenusSettings {

    Function<String, String> getPropertyToColumnConverter();

    Function<Method, Boolean> getGetterMethodDetector();

    DslInstantiator getDslInstantiator();

    MapperInstantiator getMapperInstantiator();
}


@@ -3,36 +3,37 @@ package net.helenus.core;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;

import net.helenus.core.reflect.MapExportable;

public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {

    public AbstractAuditedEntityDraft(MapExportable entity) {
        super(entity);

        Date in = new Date();
        LocalDateTime ldt = LocalDateTime.ofInstant(in.toInstant(), ZoneId.systemDefault());
        Date now = Date.from(ldt.atZone(ZoneId.systemDefault()).toInstant());

        String who = getCurrentAuditor();

        if (entity == null) {
            if (who != null) {
                set("createdBy", who);
            }
            set("createdAt", now);
        }
        if (who != null) {
            set("modifiedBy", who);
        }
        set("modifiedAt", now);
    }

    protected String getCurrentAuditor() {
        return null;
    }

    public Date createdAt() {
        return (Date) get("createdAt", Date.class);
    }
}


@@ -1,7 +1,12 @@
package net.helenus.core;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import com.google.common.primitives.Primitives;

import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.MapExportable;
@@ -9,151 +14,151 @@ import net.helenus.mapping.MappingUtil;
public abstract class AbstractEntityDraft<E> implements Drafted<E> {

    private final Map<String, Object> backingMap = new HashMap<String, Object>();
    private final MapExportable entity;
    private final Map<String, Object> entityMap;

    public AbstractEntityDraft(MapExportable entity) {
        this.entity = entity;
        this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>();
    }

    public abstract Class<E> getEntityClass();

    public E build() {
        return Helenus.map(getEntityClass(), toMap());
    }

    @SuppressWarnings("unchecked")
    protected <T> T get(Getter<T> getter, Class<?> returnType) {
        return (T) get(this.<T>methodNameFor(getter), returnType);
    }

    @SuppressWarnings("unchecked")
    protected <T> T get(String key, Class<?> returnType) {
        T value = (T) backingMap.get(key);
        if (value == null) {
            value = (T) entityMap.get(key);
            if (value == null) {
                if (Primitives.allPrimitiveTypes().contains(returnType)) {
                    DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
                    if (type == null) {
                        throw new RuntimeException("unknown primitive type " + returnType);
                    }
                    return (T) type.getDefaultValue();
                }
            }
        }
        return value;
    }

    protected <T> Object set(Getter<T> getter, Object value) {
        return set(this.<T>methodNameFor(getter), value);
    }

    protected Object set(String key, Object value) {
        if (key == null || value == null) {
            return null;
        }
        backingMap.put(key, value);
        return value;
    }

    @SuppressWarnings("unchecked")
    protected <T> T mutate(Getter<T> getter, T value) {
        return (T) mutate(this.<T>methodNameFor(getter), value);
    }

    protected Object mutate(String key, Object value) {
        Objects.requireNonNull(key);
        if (value == null) {
            return null;
        }
        if (entity != null) {
            Map<String, Object> map = entity.toMap();
            if (map.containsKey(key) && !value.equals(map.get(key))) {
                backingMap.put(key, value);
                return value;
            }
            return map.get(key);
        } else {
            backingMap.put(key, value);
            return null;
        }
    }

    private <T> String methodNameFor(Getter<T> getter) {
        return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();
    }

    public <T> Object unset(Getter<T> getter) {
        return unset(methodNameFor(getter));
    }

    public Object unset(String key) {
        if (key != null) {
            Object value = backingMap.get(key);
            backingMap.put(key, null);
            return value;
        }
        return null;
    }

    public <T> boolean reset(Getter<T> getter, T desiredValue) {
        return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
    }

    public <T> boolean reset(String key, T desiredValue) {
        if (key != null && desiredValue != null) {
            @SuppressWarnings("unchecked")
            T currentValue = (T) backingMap.get(key);
            if (currentValue == null || !currentValue.equals(desiredValue)) {
                set(key, desiredValue);
                return true;
            }
        }
        return false;
    }

    @Override
    public Map<String, Object> toMap() {
        return toMap(entityMap);
    }

    public Map<String, Object> toMap(Map<String, Object> entityMap) {
        Map<String, Object> combined;
        if (entityMap != null && entityMap.size() > 0) {
            combined = new HashMap<String, Object>(entityMap.size());
            for (String key : entityMap.keySet()) {
                combined.put(key, entityMap.get(key));
            }
        } else {
            combined = new HashMap<String, Object>(backingMap.size());
        }
        for (String key : mutated()) {
            combined.put(key, backingMap.get(key));
        }
        return combined;
    }

    @Override
    public Set<String> mutated() {
        return backingMap.keySet();
    }

    @Override
    public String toString() {
        return backingMap.toString();
    }
}
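A short sketch, written as if from inside a hypothetical concrete draft subclass, of how the
backing map overlays the entity's exported map:

// Assume the wrapped entity exports {name=Alice, age=30}.
set("age", 31);          // staged in backingMap
mutate("name", "Alice"); // equals the entity's current value -> not staged, entity value returned
toMap();                 // copy of entityMap with every mutated() key overlaid -> {name=Alice, age=31}
mutated();               // -> [age]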


@@ -15,112 +15,128 @@
 */
package net.helenus.core;

import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.Executor;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.collect.Table;
import com.google.common.util.concurrent.ListenableFuture;

import brave.Tracer;

import net.helenus.core.cache.Facet;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;

public abstract class AbstractSessionOperations {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractSessionOperations.class);

    public abstract Session currentSession();

    public abstract String usingKeyspace();

    public abstract boolean isShowCql();

    public abstract PrintStream getPrintStream();

    public abstract Executor getExecutor();

    public abstract SessionRepository getSessionRepository();

    public abstract ColumnValueProvider getValueProvider();

    public abstract ColumnValuePreparer getValuePreparer();

    public abstract ConsistencyLevel getDefaultConsistencyLevel();

    public abstract boolean getDefaultQueryIdempotency();

    public PreparedStatement prepare(RegularStatement statement) {
        try {
            log(statement, false);
            return currentSession().prepare(statement);
        } catch (RuntimeException e) {
            throw translateException(e);
        }
    }

    public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
        try {
            log(statement, false);
            return currentSession().prepareAsync(statement);
        } catch (RuntimeException e) {
            throw translateException(e);
        }
    }

    public ResultSet execute(Statement statement, boolean showValues) {
        return executeAsync(statement, showValues).getUninterruptibly();
    }

    public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
        try {
            log(statement, showValues);
            return currentSession().executeAsync(statement);
        } catch (RuntimeException e) {
            throw translateException(e);
        }
    }

    void log(Statement statement, boolean showValues) {
        if (LOG.isInfoEnabled()) {
            LOG.info("Execute statement " + statement);
        }
        if (isShowCql()) {
            if (statement instanceof BuiltStatement) {
                BuiltStatement builtStatement = (BuiltStatement) statement;
                if (showValues) {
                    RegularStatement regularStatement = builtStatement.setForceNoValues(true);
                    printCql(regularStatement.getQueryString());
                } else {
                    printCql(builtStatement.getQueryString());
                }
            } else if (statement instanceof RegularStatement) {
                RegularStatement regularStatement = (RegularStatement) statement;
                printCql(regularStatement.getQueryString());
            } else {
                printCql(statement.toString());
            }
        }
    }

    public Tracer getZipkinTracer() {
        return null;
    }

    public MetricRegistry getMetricRegistry() {
        return null;
    }

    public void mergeCache(Table<String, String, Object> cache) {
    }

    RuntimeException translateException(RuntimeException e) {
        if (e instanceof HelenusException) {
            return e;
        }
        throw new HelenusException(e);
    }

    public Object checkCache(String tableName, List<Facet> facets) {
        return null;
    }

    public void updateCache(Object pojo, List<Facet> facets) {
    }

    void printCql(String cql) {
        getPrintStream().println(cql);
    }
}


@@ -15,163 +15,239 @@
 */
package net.helenus.core;

import java.util.*;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;

import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);

    private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
    private final HelenusSession session;
    private final AbstractUnitOfWork<E> parent;
    // Cache:
    private final Table<String, String, Object> cache = HashBasedTable.create();
    private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
    private boolean aborted = false;
    private boolean committed = false;
    private String purpose_;
    private Stopwatch elapsedTime_;
    private Stopwatch databaseTime_ = Stopwatch.createUnstarted();
    private Stopwatch cacheLookupTime_ = Stopwatch.createUnstarted();

    protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
        Objects.requireNonNull(session, "containing session cannot be null");

        this.session = session;
        this.parent = parent;
    }

    @Override
    public Stopwatch getExecutionTimer() {
        return databaseTime_;
    }

    @Override
    public Stopwatch getCacheLookupTimer() {
        return cacheLookupTime_;
    }

    @Override
    public void addNestedUnitOfWork(UnitOfWork<E> uow) {
        synchronized (nested) {
            nested.add((AbstractUnitOfWork<E>) uow);
        }
    }

    @Override
    public UnitOfWork<E> begin() {
        elapsedTime_ = Stopwatch.createStarted();
        // log.record(txn::start)
        return this;
    }

    @Override
    public UnitOfWork setPurpose(String purpose) {
        purpose_ = purpose;
        return this;
    }

    public void logTimers(String what) {
        double e = (double) elapsedTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
        double d = (double) databaseTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
        double c = (double) cacheLookupTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
        double fd = (d / (e - c)) * 100.0;
        double fc = (c / (e - d)) * 100.0;
        LOG.info(String.format("UOW(%s)%s %s (total: %.3fms cache: %.3fms %2.2f%% db: %.3fms %2.2f%%)", hashCode(),
                (purpose_ == null ? "" : " " + purpose_), what, e, c, fc, d, fd));
    }

    private void applyPostCommitFunctions() {
        if (!postCommit.isEmpty()) {
            for (CommitThunk f : postCommit) {
                f.apply();
            }
        }
        logTimers("committed");
    }

    @Override
    public Optional<Object> cacheLookup(List<Facet> facets) {
        String tableName = CacheUtil.schemaName(facets);
        Optional<Object> result = Optional.empty();
        for (Facet facet : facets) {
            if (!facet.fixed()) {
                String columnName = facet.name() + "==" + facet.value();
                Object value = cache.get(tableName, columnName);
                if (value != null) {
                    if (result.isPresent() && result.get() != value) {
                        // One facet matched, but another did not.
                        result = Optional.empty();
                        break;
                    } else {
                        result = Optional.of(value);
                    }
                }
            }
        }
        if (!result.isPresent()) {
            // Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
            if (parent != null) {
                return parent.cacheLookup(facets);
            }
        }
        return result;
    }

    @Override
    public void cacheUpdate(Object value, List<Facet> facets) {
        Facet table = facets.remove(0);
        String tableName = table.value().toString();
        for (Facet facet : facets) {
            String columnName = facet.name() + "==" + facet.value();
            cache.put(tableName, columnName, value);
        }
    }

    private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
        return nested.iterator();
    }

    /**
     * Checks to see if the work performed between calling begin and now can be
     * committed or not.
     *
     * @return a function from which to chain work that only happens when commit is
     *         successful
     * @throws E
     *             when the work overlaps with other concurrent writers.
     */
    public PostCommitFunction<Void, Void> commit() throws E {
        // All nested UnitOfWork should be committed (not aborted) before calls to
        // commit, check.
        boolean canCommit = true;
        TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
        for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
            if (this != uow) {
                canCommit &= (!uow.aborted && uow.committed);
            }
        }

        // log.record(txn::provisionalCommit)
        // examine log for conflicts in read-set and write-set between begin and
        // provisional commit
        // if (conflict) { throw new ConflictingUnitOfWorkException(this) }
        // else return function so as to enable commit.andThen(() -> { do something iff
        // commit was successful; })

        if (canCommit) {
            committed = true;
            aborted = false;

            nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));

            // Merge UOW cache into parent's cache.
            if (parent != null) {
                parent.mergeCache(cache);
            } else {
                session.mergeCache(cache);
            }
            elapsedTime_.stop();

            // Apply all post-commit functions for
            if (parent == null) {
                traverser.postOrderTraversal(this).forEach(uow -> {
                    uow.applyPostCommitFunctions();
                });
                return new PostCommitFunction(this, null);
            }
        }
        // else {
        // Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
        // T object = ctor.newInstance(new Object[] { String message });
        // }
        return new PostCommitFunction(this, postCommit);
    }

    /* Explicitly discard the work and mark it as as such in the log. */
    public void abort() {
        TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
        traverser.postOrderTraversal(this).forEach(uow -> {
            uow.committed = false;
            uow.aborted = true;
        });
        // log.record(txn::abort)
        // cache.invalidateSince(txn::start time)
        if (!hasAborted()) {
            elapsedTime_.stop();
            logTimers("aborted");
        }
    }

    private void mergeCache(Table<String, String, Object> from) {
        Table<String, String, Object> to = this.cache;
        from.rowMap().forEach((rowKey, columnMap) -> {
            columnMap.forEach((columnKey, value) -> {
                if (to.contains(rowKey, columnKey)) {
                    to.put(rowKey, columnKey, CacheUtil.merge(to.get(rowKey, columnKey), from.get(rowKey, columnKey)));
                } else {
                    to.put(rowKey, columnKey, from.get(rowKey, columnKey));
                }
            });
        });
    }

    public String describeConflicts() {
        return "it's complex...";
    }

    @Override
    public void close() throws E {
        // Closing a AbstractUnitOfWork will abort iff we've not already aborted or
        // committed this unit of work.
        if (aborted == false && committed == false) {
            abort();
        }
    }

    public boolean hasAborted() {
        return aborted;
    }

    public boolean hasCommitted() {
        return committed;
    }
}
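The unit-of-work cache is keyed the same way cacheLookup and cacheUpdate build their coordinates;
a sketch with invented names:

Table<String, String, Object> cache = HashBasedTable.create();
Object widget = new Object();            // stand-in for a mapped entity instance
// cacheUpdate(widget, facets) for a 'widget' entity keyed by id=42 effectively does:
cache.put("widget", "id==42", widget);
// and cacheLookup(facets) probes the same row/column pair:
Object hit = cache.get("widget", "id==42");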


@@ -16,8 +16,5 @@
package net.helenus.core;

public enum AutoDdl {
    VALIDATE, UPDATE, CREATE, CREATE_DROP;
}


@@ -1,7 +1,6 @@
package net.helenus.core;

@FunctionalInterface
public interface CommitThunk {
    void apply();
}


@@ -2,9 +2,9 @@ package net.helenus.core;
public class ConflictingUnitOfWorkException extends Exception {

    final UnitOfWork uow;

    ConflictingUnitOfWorkException(UnitOfWork uow) {
        this.uow = uow;
    }
}


@@ -15,15 +15,13 @@
 */
package net.helenus.core;

import java.util.Optional;

import com.datastax.driver.core.Metadata;

import net.helenus.core.reflect.HelenusPropertyNode;

public interface DslInstantiator {

    <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
}


@@ -15,97 +15,102 @@
 */
package net.helenus.core;

import java.util.Objects;

import com.datastax.driver.core.querybuilder.Clause;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.ColumnValuePreparer;

public final class Filter<V> {

    private final HelenusPropertyNode node;
    private final Postulate<V> postulate;

    private Filter(HelenusPropertyNode node, Postulate<V> postulate) {
        this.node = node;
        this.postulate = postulate;
    }

    public static <V> Filter<V> equal(Getter<V> getter, V val) {
        return create(getter, Operator.EQ, val);
    }

    public static <V> Filter<V> in(Getter<V> getter, V... vals) {
        Objects.requireNonNull(getter, "empty getter");
        Objects.requireNonNull(vals, "empty values");

        if (vals.length == 0) {
            throw new IllegalArgumentException("values array is empty");
        }

        for (int i = 0; i != vals.length; ++i) {
            Objects.requireNonNull(vals[i], "value[" + i + "] is empty");
        }

        HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

        Postulate<V> postulate = Postulate.of(Operator.IN, vals);

        return new Filter<V>(node, postulate);
    }

    public static <V> Filter<V> greaterThan(Getter<V> getter, V val) {
        return create(getter, Operator.GT, val);
    }

    public static <V> Filter<V> lessThan(Getter<V> getter, V val) {
        return create(getter, Operator.LT, val);
    }

    public static <V> Filter<V> greaterThanOrEqual(Getter<V> getter, V val) {
        return create(getter, Operator.GTE, val);
    }

    public static <V> Filter<V> lessThanOrEqual(Getter<V> getter, V val) {
        return create(getter, Operator.LTE, val);
    }

    public static <V> Filter<V> create(Getter<V> getter, Postulate<V> postulate) {
        Objects.requireNonNull(getter, "empty getter");
        Objects.requireNonNull(postulate, "empty operator");

        HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

        return new Filter<V>(node, postulate);
    }

    public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
        Objects.requireNonNull(getter, "empty getter");
        Objects.requireNonNull(op, "empty op");
        Objects.requireNonNull(val, "empty value");

        if (op == Operator.IN) {
            throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
        }

        HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

        Postulate<V> postulate = Postulate.of(op, val);

        return new Filter<V>(node, postulate);
    }

    public HelenusPropertyNode getNode() {
        return node;
    }

    public Clause getClause(ColumnValuePreparer valuePreparer) {
        return postulate.getClause(node, valuePreparer);
    }

    public V[] postulateValues() {
        return postulate.values();
    }

    @Override
    public String toString() {
        return node.getColumnName() + postulate.toString();
    }
}
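Typical construction, assuming a Helenus DSL instance named 'person' whose name() and age()
getters exist (illustrative only):

Filter<String> byName = Filter.equal(person::name, "Alice");
Filter<Integer> byAge = Filter.in(person::age, 21, 22, 23);
Filter<Integer> adults = Filter.greaterThanOrEqual(person::age, 18);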


@@ -17,5 +17,5 @@ package net.helenus.core;
public interface Getter<V> {

    V get();
}


@@ -15,12 +15,17 @@
 */
package net.helenus.core;

import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;

import net.helenus.config.DefaultHelenusSettings;
import net.helenus.config.HelenusSettings;
import net.helenus.core.reflect.DslExportable;
@@ -30,164 +35,161 @@ import net.helenus.support.HelenusMappingException;

public final class Helenus {

    private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
    private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
    private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
    private static volatile HelenusSettings settings = new DefaultHelenusSettings();
    private static volatile HelenusSession singleton;

    private Helenus() {
    }

    protected static void setSession(HelenusSession session) {
        sessions.add(session);
        singleton = session;
    }

    public static HelenusSession session() {
        return singleton;
    }

    public static void shutdown() {
        sessions.forEach((session) -> {
            session.close();
            sessions.remove(session);
        });
        dslCache.clear();
    }

    public static HelenusSettings settings() {
        return settings;
    }

    public static HelenusSettings settings(HelenusSettings overrideSettings) {
        HelenusSettings old = settings;
        settings = overrideSettings;
        return old;
    }

    public static SessionInitializer connect(Cluster cluster) {
        Session session = cluster.connect();
        return new SessionInitializer(session);
    }

    public static SessionInitializer connect(Cluster cluster, String keyspace) {
        Session session = cluster.connect(keyspace);
        return new SessionInitializer(session);
    }

    public static SessionInitializer init(Session session) {
        if (session == null) {
            throw new IllegalArgumentException("empty session");
        }
        return new SessionInitializer(session);
    }

    public static void clearDslCache() {
        dslCache.clear();
    }

    public static <E> E dsl(Class<E> iface) {
        return dsl(iface, null);
    }

    public static <E> E dsl(Class<E> iface, Metadata metadata) {
        return dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
    }

    public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Metadata metadata) {
        return dsl(iface, classLoader, Optional.empty(), metadata);
    }

    public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
            Metadata metadata) {

        Object instance = null;

        if (!parent.isPresent()) {
            instance = dslCache.get(iface);
        }

        if (instance == null) {
            instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent, metadata);

            if (!parent.isPresent()) {
                Object c = dslCache.putIfAbsent(iface, instance);
                if (c != null) {
                    instance = c;
                }
            }
        }

        return (E) instance;
    }

    public static <E> E map(Class<E> iface, Map<String, Object> src) {
        return map(iface, src, iface.getClassLoader());
    }

    public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
        return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
    }

    public static HelenusEntity entity(Class<?> iface) {
        Metadata metadata = metadataForEntity.get(iface);
        if (metadata == null) {
            HelenusSession session = session();
            if (session != null) {
                metadata = session.getMetadata();
            }
        }
        return entity(iface, metadata);
    }

    public static HelenusEntity entity(Class<?> iface, Metadata metadata) {
        Object dsl = dsl(iface, metadata);
        DslExportable e = (DslExportable) dsl;
        return e.getHelenusMappingEntity();
    }

    public static HelenusEntity resolve(Object ifaceOrDsl) {
        return resolve(ifaceOrDsl, metadataForEntity.get(ifaceOrDsl));
    }

    public static HelenusEntity resolve(Object ifaceOrDsl, Metadata metadata) {

        if (ifaceOrDsl == null) {
            throw new HelenusMappingException("ifaceOrDsl is null");
        }

        if (ifaceOrDsl instanceof DslExportable) {
            DslExportable e = (DslExportable) ifaceOrDsl;
            return e.getHelenusMappingEntity();
        }

        if (ifaceOrDsl instanceof Class) {
            Class<?> iface = (Class<?>) ifaceOrDsl;

            if (!iface.isInterface()) {
                throw new HelenusMappingException("class is not an interface " + iface);
            }

            if (metadata != null) {
                metadataForEntity.putIfAbsent(iface, metadata);
            }
            return entity(iface, metadata);
        }

        throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
    }
}
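A hedged bootstrap sketch, not taken from this change (the contact point, keyspace name, and Account interface are placeholders), showing how the connect/dsl/entity entry points above are typically chained.

Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
SessionInitializer init = Helenus.connect(cluster, "helenus_example"); // configure further, then build the session
Account account = Helenus.dsl(Account.class);          // cached DSL proxy for the mapping interface
HelenusEntity entity = Helenus.entity(Account.class);  // resolved mapping metadata for the same interface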

File diff suppressed because it is too large


@@ -16,32 +16,33 @@
package net.helenus.core;

import java.lang.annotation.Annotation;

import javax.validation.ConstraintValidator;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;

public enum HelenusValidator implements PropertyValueValidator {
    INSTANCE;

    public void validate(HelenusProperty prop, Object value) {

        for (ConstraintValidator<? extends Annotation, ?> validator : prop.getValidators()) {

            ConstraintValidator typeless = (ConstraintValidator) validator;

            boolean valid = false;

            try {
                valid = typeless.isValid(value, null);
            } catch (ClassCastException e) {
                throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
            }

            if (!valid) {
                throw new HelenusException("wrong value '" + value + "' for " + prop);
            }
        }
    }
}
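A one-line sketch of the validator contract, assuming prop is a HelenusProperty whose getter carries a javax.validation constraint (the constraint itself is a placeholder, not part of this diff):

HelenusValidator.INSTANCE.validate(prop, ""); // throws HelenusException when the constraint rejects the value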


@@ -19,5 +19,5 @@ import java.util.Map;

public interface MapperInstantiator {

    <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
}


@@ -15,8 +15,10 @@
 */
package net.helenus.core;

import java.util.function.Function;

import com.datastax.driver.core.Row;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
@@ -24,203 +26,161 @@ import net.helenus.support.Fun;

public final class Mappers {

    private Mappers() {
    }

    public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1;

        public Mapper1(ColumnValueProvider provider, HelenusPropertyNode p1) {
            this.provider = provider;
            this.p1 = p1.getProperty();
        }

        @Override
        public Fun.Tuple1<A> apply(Row row) {
            return new Fun.Tuple1<A>(provider.getColumnValue(row, 0, p1));
        }
    }

    public static final class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1;
        private final HelenusProperty p2;

        public Mapper2(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
        }

        @Override
        public Fun.Tuple2<A, B> apply(Row row) {
            return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
        }
    }

    public static final class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1;
        private final HelenusProperty p2;
        private final HelenusProperty p3;

        public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
                HelenusPropertyNode p3) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
        }

        @Override
        public Fun.Tuple3<A, B, C> apply(Row row) {
            return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3));
        }
    }

    public static final class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1;
        private final HelenusProperty p2;
        private final HelenusProperty p3;
        private final HelenusProperty p4;

        public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
                HelenusPropertyNode p3, HelenusPropertyNode p4) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
        }

        @Override
        public Fun.Tuple4<A, B, C, D> apply(Row row) {
            return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
        }
    }

    public static final class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1, p2, p3, p4, p5;

        public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
                HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
        }

        @Override
        public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
            return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
        }
    }

    public static final class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1, p2, p3, p4, p5, p6;

        public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
                HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
            this.p6 = p6.getProperty();
        }

        @Override
        public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
            return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
                    provider.getColumnValue(row, 5, p6));
        }
    }

    public static final class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

        private final ColumnValueProvider provider;
        private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

        public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
                HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
                HelenusPropertyNode p7) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
            this.p6 = p6.getProperty();
            this.p7 = p7.getProperty();
        }

        @Override
        public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
            return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
                    provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
        }
    }
}
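A hedged sketch of how one of the tuple mappers above is wired up; provider, nameNode, countNode, and row stand in for objects normally supplied by the session and entity metadata and are not taken from this diff.

Function<Row, Fun.Tuple2<String, Long>> mapper = new Mappers.Mapper2<String, Long>(provider, nameNode, countNode);
Fun.Tuple2<String, Long> tuple = mapper.apply(row); // a Fun.Tuple2 carrying the two selected column values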


@@ -19,37 +19,37 @@ import java.util.HashMap;
import java.util.Map;

public enum Operator {

    EQ("=="),
    IN("in"),
    GT(">"),
    LT("<"),
    GTE(">="),
    LTE("<=");

    private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();

    static {
        for (Operator fo : Operator.values()) {
            indexByName.put(fo.getName(), fo);
        }
    }

    private final String name;

    private Operator(String name) {
        this.name = name;
    }

    public static Operator findByOperator(String name) {
        return indexByName.get(name);
    }

    public String getName() {
        return name;
    }
}
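A small sketch of the lookup table above:

Operator gte = Operator.findByOperator(">="); // resolves to Operator.GTE via the static index
String eq = Operator.EQ.getName();            // "=="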


@@ -1,8 +1,10 @@
package net.helenus.core;

import java.util.Objects;

import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;
@@ -11,34 +13,34 @@ import net.helenus.support.HelenusMappingException;

public final class Ordered {

    private final Getter<?> getter;
    private final OrderingDirection direction;

    public Ordered(Getter<?> getter, OrderingDirection direction) {
        this.getter = getter;
        this.direction = direction;
    }

    public Ordering getOrdering() {

        Objects.requireNonNull(getter, "property is null");
        Objects.requireNonNull(direction, "direction is null");

        HelenusPropertyNode propNode = MappingUtil.resolveMappingProperty(getter);

        if (propNode.getProperty().getColumnType() != ColumnType.CLUSTERING_COLUMN) {
            throw new HelenusMappingException(
                    "property must be a clustering column " + propNode.getProperty().getPropertyName());
        }

        switch (direction) {
            case ASC :
                return QueryBuilder.asc(propNode.getColumnName());

            case DESC :
                return QueryBuilder.desc(propNode.getColumnName());
        }

        throw new HelenusMappingException("invalid direction " + direction);
    }
}


@@ -1,29 +1,29 @@
package net.helenus.core;

import java.util.List;
import java.util.Objects;

public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {

    private final UnitOfWork uow;
    private final List<CommitThunk> postCommit;

    PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) {
        this.uow = uow;
        this.postCommit = postCommit;
    }

    public void andThen(CommitThunk after) {
        Objects.requireNonNull(after);
        if (postCommit == null) {
            after.apply();
        } else {
            postCommit.add(after);
        }
    }

    @Override
    public R apply(T t) {
        return null;
    }
}
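A hedged sketch of the post-commit hook, assuming CommitThunk is the zero-argument callback whose apply() is invoked above; uow and thunks are placeholders, not taken from this diff.

PostCommitFunction<Void, Void> commits = new PostCommitFunction<>(uow, thunks);
commits.andThen(() -> System.out.println("committed")); // queued when thunks is non-null, run immediately otherwise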


@@ -17,80 +17,85 @@ package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;

public final class Postulate<V> {

    private final Operator operator;
    private final V[] values;

    protected Postulate(Operator op, V[] values) {
        this.operator = op;
        this.values = values;
    }

    public static <V> Postulate<V> of(Operator op, V... values) {
        return new Postulate<V>(op, values);
    }

    public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {

        switch (operator) {
            case EQ :
                return QueryBuilder.eq(node.getColumnName(),
                        valuePreparer.prepareColumnValue(values[0], node.getProperty()));

            case IN :
                Object[] preparedValues = new Object[values.length];
                for (int i = 0; i != values.length; ++i) {
                    preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
                }
                return QueryBuilder.in(node.getColumnName(), preparedValues);

            case LT :
                return QueryBuilder.lt(node.getColumnName(),
                        valuePreparer.prepareColumnValue(values[0], node.getProperty()));

            case LTE :
                return QueryBuilder.lte(node.getColumnName(),
                        valuePreparer.prepareColumnValue(values[0], node.getProperty()));

            case GT :
                return QueryBuilder.gt(node.getColumnName(),
                        valuePreparer.prepareColumnValue(values[0], node.getProperty()));

            case GTE :
                return QueryBuilder.gte(node.getColumnName(),
                        valuePreparer.prepareColumnValue(values[0], node.getProperty()));

            default :
                throw new HelenusMappingException("unknown filter operation " + operator);
        }
    }

    public V[] values() {
        return values;
    }

    @Override
    public String toString() {

        if (operator == Operator.IN) {

            if (values == null) {
                return "in()";
            }

            int len = values.length;
            StringBuilder b = new StringBuilder();
            b.append("in(");
            for (int i = 0; i != len; i++) {
                if (b.length() > 3) {
                    b.append(", ");
                }
                b.append(String.valueOf(values[i]));
            }
            return b.append(')').toString();
        }

        return operator.getName() + values[0];
    }
}
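A hedged sketch of building and rendering a postulate; node and valuePreparer are placeholders for the property node and the session's value preparer, not objects defined in this diff.

Postulate<Integer> in = Postulate.of(Operator.IN, 1, 2, 3);
String rendered = in.toString();                   // "in(1, 2, 3)"
Clause clause = in.getClause(node, valuePreparer); // QueryBuilder.in(columnName, preparedValues)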


@@ -19,5 +19,5 @@ import net.helenus.mapping.HelenusProperty;

public interface PropertyValueValidator {

    void validate(HelenusProperty prop, Object value);
}


@@ -15,80 +15,83 @@
 */
package net.helenus.core;

import java.util.List;
import java.util.Map;
import java.util.Objects;

import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.mapping.OrderingDirection;

/** Sugar methods for the queries */
public final class Query {

    private Query() {
    }

    public static BindMarker marker() {
        return QueryBuilder.bindMarker();
    }

    public static BindMarker marker(String name) {
        return QueryBuilder.bindMarker(name);
    }

    public static Ordered asc(Getter<?> getter) {
        return new Ordered(getter, OrderingDirection.ASC);
    }

    public static Ordered desc(Getter<?> getter) {
        return new Ordered(getter, OrderingDirection.DESC);
    }

    public static <V> Postulate<V> eq(V val) {
        return Postulate.of(Operator.EQ, val);
    }

    public static <V> Postulate<V> lt(V val) {
        return Postulate.of(Operator.LT, val);
    }

    public static <V> Postulate<V> lte(V val) {
        return Postulate.of(Operator.LTE, val);
    }

    public static <V> Postulate<V> gt(V val) {
        return Postulate.of(Operator.GT, val);
    }

    public static <V> Postulate<V> gte(V val) {
        return Postulate.of(Operator.GTE, val);
    }

    public static <V> Postulate<V> in(V[] vals) {
        return new Postulate<V>(Operator.IN, vals);
    }

    public static <K, V> Getter<V> getIdx(Getter<List<V>> listGetter, int index) {
        Objects.requireNonNull(listGetter, "listGetter is null");

        return new Getter<V>() {
            @Override
            public V get() {
                return listGetter.get().get(index);
            }
        };
    }

    public static <K, V> Getter<V> get(Getter<Map<K, V>> mapGetter, K k) {
        Objects.requireNonNull(mapGetter, "mapGetter is null");
        Objects.requireNonNull(k, "key is null");

        return new Getter<V>() {
            @Override
            public V get() {
                return mapGetter.get().get(k);
            }
        };
    }
}
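A hedged sketch of the sugar methods above; account is a hypothetical DSL proxy whose createdAt() is a clustering column and whose tags() returns a List<String>, none of which comes from this diff.

Postulate<Double> atLeast = Query.gte(100.0);
Ordered newestFirst = Query.desc(account::createdAt);
Getter<String> firstTag = Query.getIdx(account::tags, 0); // addresses element 0 of the list column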


@@ -15,15 +15,16 @@
 */
package net.helenus.core;

import java.util.*;
import java.util.stream.Collectors;

import com.datastax.driver.core.*;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
@@ -32,417 +33,394 @@ import net.helenus.mapping.type.OptionalColumnMetadata;
import net.helenus.support.CqlUtil;
import net.helenus.support.HelenusMappingException;

public final class SchemaUtil {

    private SchemaUtil() {
    }

    public static RegularStatement use(String keyspace, boolean forceQuote) {
        if (forceQuote) {
            return new SimpleStatement("USE" + CqlUtil.forceQuote(keyspace));
        } else {
            return new SimpleStatement("USE " + keyspace);
        }
    }

    public static SchemaStatement createUserType(HelenusEntity entity) {

        if (entity.getType() != HelenusEntityType.UDT) {
            throw new HelenusMappingException("expected UDT entity " + entity);
        }

        CreateType create = SchemaBuilder.createType(entity.getName().toCql());

        for (HelenusProperty prop : entity.getOrderedProperties()) {

            ColumnType columnType = prop.getColumnType();

            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
                        + prop.getPropertyName() + " in entity " + entity);
            }

            try {
                prop.getDataType().addColumn(create, prop.getColumnName());
            } catch (IllegalArgumentException e) {
                throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
                        + entity.getName().getName() + "'", e);
            }
        }

        return create;
    }

    public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
            boolean dropUnusedColumns) {

        if (entity.getType() != HelenusEntityType.UDT) {
            throw new HelenusMappingException("expected UDT entity " + entity);
        }

        List<SchemaStatement> result = new ArrayList<SchemaStatement>();

        /**
         * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
         * when it will exist
         */
        Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

        final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

        for (HelenusProperty prop : entity.getOrderedProperties()) {

            String columnName = prop.getColumnName().getName();

            if (dropUnusedColumns) {
                visitedColumns.add(columnName);
            }

            ColumnType columnType = prop.getColumnType();

            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                continue;
            }

            DataType dataType = userType.getFieldType(columnName);
            SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
                    optional(columnName, dataType));

            if (stmt != null) {
                result.add(stmt);
            }
        }

        if (dropUnusedColumns) {
            for (String field : userType.getFieldNames()) {
                if (!visitedColumns.contains(field)) {
                    result.add(alter.dropColumn(field));
                }
            }
        }

        return result;
    }

    public static SchemaStatement dropUserType(HelenusEntity entity) {

        if (entity.getType() != HelenusEntityType.UDT) {
            throw new HelenusMappingException("expected UDT entity " + entity);
        }

        return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
    }

    public static SchemaStatement dropUserType(UserType type) {
        return SchemaBuilder.dropType(type.getTypeName()).ifExists();
    }

    public static String createPrimaryKeyPhrase(Collection<HelenusProperty> properties) {
        List<String> p = new ArrayList<String>(properties.size());
        List<String> c = new ArrayList<String>(properties.size());

        for (HelenusProperty prop : properties) {
            String columnName = prop.getColumnName().toCql();
            switch (prop.getColumnType()) {
                case PARTITION_KEY :
                    p.add(columnName);
                    break;
                case CLUSTERING_COLUMN :
                    c.add(columnName);
                    break;
                default :
                    break;
            }
        }

        return "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
                + ((c.size() > 0) ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0)) : "") + ")";
    }

    public static SchemaStatement createMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
        if (entity.getType() != HelenusEntityType.VIEW) {
            throw new HelenusMappingException("expected view entity " + entity);
        }

        if (entity == null) {
            throw new HelenusMappingException("no entity or table to select data");
        }

        List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
        entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
                .forEach(p -> props.add(p));

        Select.Selection selection = QueryBuilder.select();

        for (HelenusPropertyNode prop : props) {
            String columnName = prop.getColumnName();
            selection = selection.column(columnName);
        }
        Class<?> iface = entity.getMappingInterface();
        String tableName = Helenus.entity(iface.getInterfaces()[0]).getName().toCql();
        Select.Where where = selection.from(tableName).where();
        List<String> o = new ArrayList<String>(props.size());

        for (HelenusPropertyNode prop : props) {
            String columnName = prop.getColumnName();
            switch (prop.getProperty().getColumnType()) {
                case PARTITION_KEY :
                    where = where.and(new IsNotNullClause(columnName));
                    break;

                case CLUSTERING_COLUMN :
                    where = where.and(new IsNotNullClause(columnName));

                    ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod()
                            .getAnnotation(ClusteringColumn.class);
                    if (clusteringColumn != null && clusteringColumn.ordering() != null) {
                        o.add(columnName + " " + clusteringColumn.ordering().cql());
                    }
                    break;
                default :
                    break;
            }
        }

        String primaryKey = "PRIMARY KEY " + createPrimaryKeyPhrase(entity.getOrderedProperties());

        String clustering = "";
        if (o.size() > 0) {
            clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
        }
        return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering).ifNotExists();
    }

    public static SchemaStatement dropMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
        return new DropMaterializedView(keyspace, viewName);
    }

    public static SchemaStatement createTable(HelenusEntity entity) {

        if (entity.getType() != HelenusEntityType.TABLE) {
            throw new HelenusMappingException("expected table entity " + entity);
        }

        // NOTE: There is a bug in the normal path of createTable where the
        // "cache" is set too early and never unset preventing more than
        // one column on a table.
        // SchemaBuilder.createTable(entity.getName().toCql());
        CreateTable create = new CreateTable(entity.getName().toCql());

        create.ifNotExists();

        List<HelenusProperty> clusteringColumns = new ArrayList<HelenusProperty>();

        for (HelenusProperty prop : entity.getOrderedProperties()) {

            ColumnType columnType = prop.getColumnType();

            if (columnType == ColumnType.CLUSTERING_COLUMN) {
                clusteringColumns.add(prop);
            }

            prop.getDataType().addColumn(create, prop.getColumnName());
        }

        if (!clusteringColumns.isEmpty()) {
            Options options = create.withOptions();
            clusteringColumns
                    .forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
        }

        return create;
    }

    public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity,
            boolean dropUnusedColumns) {

        if (entity.getType() != HelenusEntityType.TABLE) {
            throw new HelenusMappingException("expected table entity " + entity);
        }

        List<SchemaStatement> result = new ArrayList<SchemaStatement>();

        Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

        final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

        for (HelenusProperty prop : entity.getOrderedProperties()) {

            String columnName = prop.getColumnName().getName();

            if (dropUnusedColumns) {
                visitedColumns.add(columnName);
            }

            ColumnType columnType = prop.getColumnType();

            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                continue;
            }

            ColumnMetadata columnMetadata = tmd.getColumn(columnName);
            SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
                    optional(columnMetadata));

            if (stmt != null) {
                result.add(stmt);
            }
        }

        if (dropUnusedColumns) {
            for (ColumnMetadata cm : tmd.getColumns()) {
                if (!visitedColumns.contains(cm.getName())) {
                    result.add(alter.dropColumn(cm.getName()));
                }
            }
        }

        return result;
    }

    public static SchemaStatement dropTable(HelenusEntity entity) {

        if (entity.getType() != HelenusEntityType.TABLE) {
            throw new HelenusMappingException("expected table entity " + entity);
        }

        return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
    }

    public static SchemaStatement createIndex(HelenusProperty prop) {
        if (prop.caseSensitiveIndex()) {
            return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()).ifNotExists()
                    .onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
        } else {
            return new CreateSasiIndex(prop.getIndexName().get().toCql()).ifNotExists()
                    .onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
        }
    }

    public static List<SchemaStatement> createIndexes(HelenusEntity entity) {

        return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
                .map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
    }

    public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
            boolean dropUnusedIndexes) {

        List<SchemaStatement> list = new ArrayList<SchemaStatement>();

        final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();

        entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {

            String columnName = p.getColumnName().getName();

            if (dropUnusedIndexes) {
                visitedColumns.add(columnName);
            }

            ColumnMetadata cm = tmd.getColumn(columnName);
            if (cm != null) {
                IndexMetadata im = tmd.getIndex(columnName);
                if (im == null) {
                    list.add(createIndex(p));
                }
            } else {
                list.add(createIndex(p));
            }
        });

        if (dropUnusedIndexes) {

            tmd.getColumns().stream()
                    .filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
                    .forEach(c -> {
                        list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
                    });
        }

        return list;
    }

    public static SchemaStatement dropIndex(HelenusProperty prop) {
        return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
    }

    private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
        switch (o) {
            case ASC :
                return SchemaBuilder.Direction.ASC;
            case DESC :
                return SchemaBuilder.Direction.DESC;
        }
        throw new HelenusMappingException("unknown ordering " + o);
    }

    public static void throwNoMapping(HelenusProperty prop) {

        throw new HelenusMappingException(
                "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
                        + prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
                        + prop.getEntity());
    }

    private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
        if (columnMetadata != null) {
            return new OptionalColumnMetadata() {

                @Override
                public String getName() {
                    return columnMetadata.getName();
                }

                @Override
                public DataType getType() {
                    return columnMetadata.getType();
                }
            };
        }
        return null;
    }

    private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
        if (dataType != null) {
            return new OptionalColumnMetadata() {

                @Override
                public String getName() {
                    return name;
                }

                @Override
                public DataType getType() {
                    return dataType;
                }
            };
        }
        return null;
    }
}
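A hedged sketch of driving the DDL helpers above for a hypothetical Account entity (the entity name is a placeholder, not taken from this change):

HelenusEntity entity = Helenus.entity(Account.class);
SchemaStatement createTable = SchemaUtil.createTable(entity);           // CREATE TABLE IF NOT EXISTS ...
List<SchemaStatement> createIndexes = SchemaUtil.createIndexes(entity); // one statement per indexed property
System.out.println(createTable.getQueryString());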


@ -15,16 +15,18 @@
 */
package net.helenus.core;

import java.io.IOException;
import java.io.PrintStream;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;

import brave.Tracer;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
@ -37,399 +39,344 @@ import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations {

	private final Session session;
	private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
	private CodecRegistry registry;
	private String usingKeyspace;
	private boolean showCql = false;
	private ConsistencyLevel consistencyLevel;
	private boolean idempotent = true;
	private MetricRegistry metricRegistry = new MetricRegistry();
	private Tracer zipkinTracer;
	private PrintStream printStream = System.out;
	private Executor executor = MoreExecutors.directExecutor();
	private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
	private SessionRepositoryBuilder sessionRepository;
	private boolean dropUnusedColumns = false;
	private boolean dropUnusedIndexes = false;
	private KeyspaceMetadata keyspaceMetadata;
	private AutoDdl autoDdl = AutoDdl.UPDATE;

	SessionInitializer(Session session) {
		this.session = Objects.requireNonNull(session, "empty session");
		this.usingKeyspace = session.getLoggedKeyspace(); // can be null
		this.sessionRepository = new SessionRepositoryBuilder(session);
	}

	@Override
	public Session currentSession() {
		return session;
	}

	@Override
	public String usingKeyspace() {
		return usingKeyspace;
	}

	@Override
	public Executor getExecutor() {
		return executor;
	}

	@Override
	public SessionRepository getSessionRepository() {
		throw new HelenusException("not expected to call");
	}

	@Override
	public ColumnValueProvider getValueProvider() {
		throw new HelenusException("not expected to call");
	}

	@Override
	public ColumnValuePreparer getValuePreparer() {
		throw new HelenusException("not expected to call");
	}

	public SessionInitializer showCql() {
		this.showCql = true;
		return this;
	}

	public SessionInitializer showCql(boolean enabled) {
		this.showCql = enabled;
		return this;
	}

	public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
		this.metricRegistry = metricRegistry;
		return this;
	}

	public SessionInitializer zipkinTracer(Tracer tracer) {
		this.zipkinTracer = tracer;
		return this;
	}

	public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
		this.unitOfWorkClass = e;
		return this;
	}

	public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
		this.consistencyLevel = consistencyLevel;
		return this;
	}

	public ConsistencyLevel getDefaultConsistencyLevel() {
		return consistencyLevel;
	}

	public SessionInitializer idempotentQueryExecution(boolean idempotent) {
		this.idempotent = idempotent;
		return this;
	}

	public boolean getDefaultQueryIdempotency() {
		return idempotent;
	}

	@Override
	public PrintStream getPrintStream() {
		return printStream;
	}

	public SessionInitializer printTo(PrintStream out) {
		this.printStream = out;
		return this;
	}

	public SessionInitializer withExecutor(Executor executor) {
		Objects.requireNonNull(executor, "empty executor");
		this.executor = executor;
		return this;
	}

	public SessionInitializer withCachingExecutor() {
		this.executor = Executors.newCachedThreadPool();
		return this;
	}

	public SessionInitializer dropUnusedColumns(boolean enabled) {
		this.dropUnusedColumns = enabled;
		return this;
	}

	public SessionInitializer dropUnusedIndexes(boolean enabled) {
		this.dropUnusedIndexes = enabled;
		return this;
	}

	public SessionInitializer withCodecRegistry(CodecRegistry registry) {
		this.registry = registry;
		return this;
	}

	@Override
	public boolean isShowCql() {
		return showCql;
	}

	public SessionInitializer addPackage(String packageName) {
		try {
			PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
					.forEach(clazz -> {
						initList.add(Either.right(clazz));
					});
		} catch (IOException | ClassNotFoundException e) {
			throw new HelenusException("fail to add package " + packageName, e);
		}
		return this;
	}

	public SessionInitializer add(Object... dsls) {
		Objects.requireNonNull(dsls, "dsls is empty");
		int len = dsls.length;
		for (int i = 0; i != len; ++i) {
			Object obj = Objects.requireNonNull(dsls[i], "element " + i + " is empty");
			initList.add(Either.left(obj));
		}
		return this;
	}

	public SessionInitializer autoValidate() {
		this.autoDdl = AutoDdl.VALIDATE;
		return this;
	}

	public SessionInitializer autoUpdate() {
		this.autoDdl = AutoDdl.UPDATE;
		return this;
	}

	public SessionInitializer autoCreate() {
		this.autoDdl = AutoDdl.CREATE;
		return this;
	}

	public SessionInitializer autoCreateDrop() {
		this.autoDdl = AutoDdl.CREATE_DROP;
		return this;
	}

	public SessionInitializer auto(AutoDdl autoDdl) {
		this.autoDdl = autoDdl;
		return this;
	}

	public SessionInitializer use(String keyspace) {
		session.execute(SchemaUtil.use(keyspace, false));
		this.usingKeyspace = keyspace;
		return this;
	}

	public SessionInitializer use(String keyspace, boolean forceQuote) {
		session.execute(SchemaUtil.use(keyspace, forceQuote));
		this.usingKeyspace = keyspace;
		return this;
	}

	public void singleton() {
		Helenus.setSession(get());
	}

	public synchronized HelenusSession get() {
		initialize();
		return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
				autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
				zipkinTracer);
	}

	private void initialize() {

		Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");

		initList.forEach((either) -> {
			Class<?> iface = null;
			if (either.isLeft()) {
				iface = MappingUtil.getMappingInterface(either.getLeft());
			} else {
				iface = either.getRight();
			}

			DslExportable dsl = (DslExportable) Helenus.dsl(iface);
			dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
			sessionRepository.add(dsl);
		});

		TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
		UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);

		switch (autoDdl) {
			case CREATE_DROP :

				// Drop views first, otherwise a `DROP TABLE ...` will fail while the table is
				// still referenced by a view.
				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
						.forEach(e -> tableOps.dropView(e));

				// Drop tables second, before DROP TYPE, otherwise a `DROP TYPE ...` will fail
				// while the type is still referenced by a table.
				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
						.forEach(e -> tableOps.dropTable(e));

				eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));

				// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
				// intentional!)
			case CREATE :
				eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
						.forEach(e -> tableOps.createTable(e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
						.forEach(e -> tableOps.createView(e));

				break;

			case VALIDATE :
				eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
						.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));

				break;

			case UPDATE :
				eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
						.forEach(e -> tableOps.dropView(e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
						.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));

				sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
						.forEach(e -> tableOps.createView(e));
				break;
		}

		KeyspaceMetadata km = getKeyspaceMetadata();

		for (UserType userType : km.getUserTypes()) {
			sessionRepository.addUserType(userType.getTypeName(), userType);
		}
	}

	private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {

		Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
		Set<HelenusEntity> stack = new HashSet<HelenusEntity>();

		sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
			stack.clear();
			eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
		});
	}

	private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
		ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
		eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
		deque.stream().forEach(e -> {
			action.accept(e);
		});
	}

	private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
			UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {

		stack.add(e);

		Collection<HelenusEntity> createBefore = sessionRepository.getUserTypeUses(e);

		for (HelenusEntity be : createBefore) {
			if (!processedSet.contains(be) && !stack.contains(be)) {
				eachUserTypeInRecursion(be, processedSet, stack, userTypeOps, action);
				processedSet.add(be);
			}
		}

		if (!processedSet.contains(e)) {
			action.accept(e);
			processedSet.add(e);
		}
	}

	private KeyspaceMetadata getKeyspaceMetadata() {
		if (keyspaceMetadata == null) {
			keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
		}
		return keyspaceMetadata;
	}

	private TableMetadata getTableMetadata(HelenusEntity entity) {
		return getKeyspaceMetadata().getTable(entity.getName().getName());
	}

	private UserType getUserType(HelenusEntity entity) {
		return getKeyspaceMetadata().getUserType(entity.getName().getName());
	}
}
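A minimal bootstrap sketch of the fluent API above. The Helenus.init(...) entry point and the "com.example.model" package name are assumptions for illustration; the chained calls are the setters defined on SessionInitializer.

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;

import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession;

public class BootstrapSketch {
	public static void main(String[] args) {
		Session session = Cluster.builder().addContactPoint("127.0.0.1").build().connect();
		HelenusSession helenus = Helenus.init(session) // assumed to hand back a SessionInitializer
				.showCql()                             // echo generated CQL to the configured PrintStream
				.use("example")                        // executes `USE example` and records the keyspace
				.addPackage("com.example.model")       // hypothetical package of mapping interfaces
				.autoUpdate()                          // AutoDdl.UPDATE: reconcile tables, views and UDTs
				.get();                                // runs initialize() and builds the HelenusSession
	}
}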
@ -15,30 +15,31 @@
 */
package net.helenus.core;

import java.util.Collection;

import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;

import net.helenus.mapping.HelenusEntity;

public final class SessionRepository {

	private final ImmutableMap<String, UserType> userTypeMap;
	private final ImmutableMap<Class<?>, HelenusEntity> entityMap;

	public SessionRepository(SessionRepositoryBuilder builder) {
		userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
		entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
	}

	public UserType findUserType(String name) {
		return userTypeMap.get(name.toLowerCase());
	}

	public Collection<HelenusEntity> entities() {
		return entityMap.values();
	}
}
@ -15,15 +15,17 @@
 */
package net.helenus.core;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
@ -33,112 +35,110 @@ import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder {

	private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);

	private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
	private final Map<String, UserType> userTypeMap = new HashMap<String, UserType>();
	private final Multimap<HelenusEntity, HelenusEntity> userTypeUsesMap = HashMultimap.create();
	private final Session session;

	SessionRepositoryBuilder(Session session) {
		this.session = session;
	}

	public SessionRepository build() {
		return new SessionRepository(this);
	}

	public Collection<HelenusEntity> getUserTypeUses(HelenusEntity udtName) {
		return userTypeUsesMap.get(udtName);
	}

	public Collection<HelenusEntity> entities() {
		return entityMap.values();
	}

	protected Map<Class<?>, HelenusEntity> getEntityMap() {
		return entityMap;
	}

	protected Map<String, UserType> getUserTypeMap() {
		return userTypeMap;
	}

	public void addUserType(String name, UserType userType) {
		userTypeMap.putIfAbsent(name.toLowerCase(), userType);
	}

	public HelenusEntity add(Object dsl) {
		return add(dsl, Optional.empty());
	}

	public void addEntity(HelenusEntity entity) {
		HelenusEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
		if (concurrentEntity == null) {
			addUserDefinedTypes(entity.getOrderedProperties());
		}
	}

	public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
		HelenusEntity helenusEntity = Helenus.resolve(dsl, session.getCluster().getMetadata());
		Class<?> iface = helenusEntity.getMappingInterface();
		HelenusEntity entity = entityMap.get(iface);
		if (entity == null) {
			entity = helenusEntity;
			if (type.isPresent() && entity.getType() != type.get()) {
				throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
			}
			HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
			if (concurrentEntity == null) {
				addUserDefinedTypes(entity.getOrderedProperties());
			} else {
				entity = concurrentEntity;
			}
		}
		return entity;
	}

	private void addUserDefinedTypes(Collection<HelenusProperty> props) {
		for (HelenusProperty prop : props) {
			AbstractDataType type = prop.getDataType();
			if (type instanceof DTDataType) {
				continue;
			}
			if (!UDTValue.class.isAssignableFrom(prop.getJavaType())) {
				for (Class<?> udtClass : type.getTypeArguments()) {
					if (UDTValue.class.isAssignableFrom(udtClass)) {
						continue;
					}
					HelenusEntity addedUserType = add(udtClass, OPTIONAL_UDT);
					if (HelenusEntityType.UDT == prop.getEntity().getType()) {
						userTypeUsesMap.put(prop.getEntity(), addedUserType);
					}
				}
			}
		}
	}
}
@ -15,97 +15,88 @@
 */
package net.helenus.core;

import java.util.List;

import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;

public final class TableOperations {

	private final AbstractSessionOperations sessionOps;
	private final boolean dropUnusedColumns;
	private final boolean dropUnusedIndexes;

	public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
		this.sessionOps = sessionOps;
		this.dropUnusedColumns = dropUnusedColumns;
		this.dropUnusedIndexes = dropUnusedIndexes;
	}

	public void createTable(HelenusEntity entity) {
		sessionOps.execute(SchemaUtil.createTable(entity), true);
		executeBatch(SchemaUtil.createIndexes(entity));
	}

	public void dropTable(HelenusEntity entity) {
		sessionOps.execute(SchemaUtil.dropTable(entity), true);
	}

	public void validateTable(TableMetadata tmd, HelenusEntity entity) {
		if (tmd == null) {
			throw new HelenusException(
					"table does not exist " + entity.getName() + " for entity " + entity.getMappingInterface());
		}

		List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
		list.addAll(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));

		if (!list.isEmpty()) {
			throw new HelenusException(
					"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
		}
	}

	public void updateTable(TableMetadata tmd, HelenusEntity entity) {
		if (tmd == null) {
			createTable(entity);
			return;
		}

		executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
		executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
	}

	public void createView(HelenusEntity entity) {
		sessionOps.execute(
				SchemaUtil.createMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
		// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
		// does not yet support 2i on materialized views.
	}

	public void dropView(HelenusEntity entity) {
		sessionOps.execute(
				SchemaUtil.dropMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
	}

	public void updateView(TableMetadata tmd, HelenusEntity entity) {
		if (tmd == null) {
			createTable(entity);
			return;
		}

		executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
		executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
	}

	private void executeBatch(List<SchemaStatement> list) {
		list.forEach(s -> {
			sessionOps.execute(s, true);
		});
	}
}
@ -15,42 +15,54 @@
 */
package net.helenus.core;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import com.google.common.base.Stopwatch;

import net.helenus.core.cache.Facet;

public interface UnitOfWork<X extends Exception> extends AutoCloseable {

	/**
	 * Marks the beginning of a transactional section of work. Will write a record
	 * to the shared write-ahead log.
	 *
	 * @return the handle used to commit or abort the work.
	 */
	UnitOfWork<X> begin();

	void addNestedUnitOfWork(UnitOfWork<X> uow);

	/**
	 * Checks to see if the work performed between calling begin and now can be
	 * committed or not.
	 *
	 * @return a function from which to chain work that only happens when commit is
	 *         successful
	 * @throws X
	 *             when the work overlaps with other concurrent writers.
	 */
	PostCommitFunction<Void, Void> commit() throws X;

	/**
	 * Explicitly abort the work within this unit of work. Any nested aborted unit
	 * of work will trigger the entire unit of work to commit.
	 */
	void abort();

	boolean hasAborted();

	boolean hasCommitted();

	Optional<Object> cacheLookup(List<Facet> facets);

	void cacheUpdate(Object pojo, List<Facet> facets);

	UnitOfWork setPurpose(String purpose);

	Stopwatch getExecutionTimer();

	Stopwatch getCacheLookupTimer();

	Map<String, Set<Object>> getCache();
}
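A hedged sketch of how the contract above is meant to be driven; the session.begin() factory is an assumption here, everything else uses only methods declared on the interface.

	static void doWork(HelenusSession session) throws HelenusException {
		UnitOfWork<HelenusException> uow = session.begin(); // assumed factory method on HelenusSession
		try {
			// ... reads and writes performed through the session, passing `uow` along ...
			uow.commit(); // returns a PostCommitFunction to chain work that runs only after a successful commit
		} catch (HelenusException e) {
			uow.abort(); // an aborted nested unit of work propagates to the enclosing one
			throw e;
		}
	}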
@ -19,8 +19,8 @@ import net.helenus.support.HelenusException;
class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> {

	@SuppressWarnings("unchecked")
	public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) {
		super(session, (AbstractUnitOfWork<HelenusException>) parent);
	}
}
@ -15,65 +15,63 @@
 */
package net.helenus.core;

import java.util.List;

import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;

public final class UserTypeOperations {

	private final AbstractSessionOperations sessionOps;
	private final boolean dropUnusedColumns;

	public UserTypeOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns) {
		this.sessionOps = sessionOps;
		this.dropUnusedColumns = dropUnusedColumns;
	}

	public void createUserType(HelenusEntity entity) {
		sessionOps.execute(SchemaUtil.createUserType(entity), true);
	}

	public void dropUserType(HelenusEntity entity) {
		sessionOps.execute(SchemaUtil.dropUserType(entity), true);
	}

	public void validateUserType(UserType userType, HelenusEntity entity) {
		if (userType == null) {
			throw new HelenusException(
					"userType does not exist " + entity.getName() + " for entity " + entity.getMappingInterface());
		}

		List<SchemaStatement> list = SchemaUtil.alterUserType(userType, entity, dropUnusedColumns);

		if (!list.isEmpty()) {
			throw new HelenusException(
					"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
		}
	}

	public void updateUserType(UserType userType, HelenusEntity entity) {
		if (userType == null) {
			createUserType(entity);
			return;
		}

		executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns));
	}

	private void executeBatch(List<SchemaStatement> list) {
		list.forEach(s -> {
			sessionOps.execute(s, true);
		});
	}
}
@ -1,3 +1,18 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.annotation;

import java.lang.annotation.ElementType;
@ -7,4 +22,5 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Cacheable {
}
@ -4,13 +4,14 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import net.helenus.core.ConflictingUnitOfWorkException;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Retry {

	Class<? extends Exception>[] on() default ConflictingUnitOfWorkException.class;

	int times() default 3;
}
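For illustration, a caller-side sketch of the annotation; the MembershipService class and its body are hypothetical, and the aspect below must be woven (AspectJ or Spring AOP) for the retry to take effect.

public class MembershipService {

	// Retried up to three times when a ConflictingUnitOfWorkException appears
	// anywhere in the cause chain (see isRetryThrowable in the aspects below).
	@Retry(on = ConflictingUnitOfWorkException.class, times = 3)
	public void renewMembership(String accountId) {
		// ... work that may conflict with a concurrent unit of work ...
	}
}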
@ -0,0 +1,83 @@
package net.helenus.core.aspect;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect
public class RetryAspect {
private static final Logger log = LoggerFactory.getLogger(RetryAspect.class);
@Around("@annotation(net.helenus.core.annotations.Retry)")
public Object retry(ProceedingJoinPoint pjp) throws Throwable {
Retry retryAnnotation = getRetryAnnotation(pjp);
return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
}
private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
return pjp.proceed();
}
private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
int times = retryAnnotation.times();
Class<? extends Throwable>[] retryOn = retryAnnotation.on();
Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn);
}
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
throws Throwable {
try {
return proceed(pjp);
} catch (Throwable throwable) {
if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
return tryProceeding(pjp, times, retryOn);
}
throw throwable;
}
}
private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
Throwable[] causes = ExceptionUtils.getThrowables(throwable);
for (Throwable cause : causes) {
for (Class<? extends Throwable> retryThrowable : retryOn) {
if (retryThrowable.isAssignableFrom(cause.getClass())) {
return true;
}
}
}
return false;
}
private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
MethodSignature signature = (MethodSignature) pjp.getSignature();
Method method = signature.getMethod();
Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
if (retryAnnotation != null) {
return retryAnnotation;
}
Class[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass();
}
method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
return AnnotationUtils.findAnnotation(method, Retry.class);
}
}
@ -2,7 +2,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method;
import java.util.Arrays;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
@ -13,69 +13,71 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;

import net.helenus.core.annotation.Retry;

@Aspect
public class RetryConcurrentUnitOfWorkAspect {

	private static final Logger log = LoggerFactory.getLogger(RetryConcurrentUnitOfWorkAspect.class);

	@Around("@annotation(net.helenus.core.annotations.Retry)")
	public Object retry(ProceedingJoinPoint pjp) throws Throwable {
		Retry retryAnnotation = getRetryAnnotation(pjp);
		return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
	}

	private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
		return pjp.proceed();
	}

	private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
		int times = retryAnnotation.times();
		Class<? extends Throwable>[] retryOn = retryAnnotation.on();
		Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
		Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
		log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
		return tryProceeding(pjp, times, retryOn);
	}

	private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
			throws Throwable {
		try {
			return proceed(pjp);
		} catch (Throwable throwable) {
			if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
				log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
				return tryProceeding(pjp, times, retryOn);
			}
			throw throwable;
		}
	}

	private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
		Throwable[] causes = ExceptionUtils.getThrowables(throwable);
		for (Throwable cause : causes) {
			for (Class<? extends Throwable> retryThrowable : retryOn) {
				if (retryThrowable.isAssignableFrom(cause.getClass())) {
					return true;
				}
			}
		}
		return false;
	}

	private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
		MethodSignature signature = (MethodSignature) pjp.getSignature();
		Method method = signature.getMethod();
		Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
		if (retryAnnotation != null) {
			return retryAnnotation;
		}
		Class[] argClasses = new Class[pjp.getArgs().length];
		for (int i = 0; i < pjp.getArgs().length; i++) {
			argClasses[i] = pjp.getArgs()[i].getClass();
		}
		method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
		return AnnotationUtils.findAnnotation(method, Retry.class);
	}
}
@ -0,0 +1,38 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
import java.util.Map;
import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty;
public class BoundFacet extends Facet<String> {
private final Map<HelenusProperty, Object> properties;
BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name,
(properties.keySet().size() > 1)
? "[" + String.join(", ",
properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))
+ "]"
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
.collect(Collectors.toSet())));
this.properties = properties;
}
}
@ -0,0 +1,49 @@
package net.helenus.core.cache;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class CacheUtil {
public static List<String[]> combinations(List<String> items) {
int n = items.size();
if (n > 20 || n < 0)
throw new IllegalArgumentException(n + " is out of range");
long e = Math.round(Math.pow(2, n));
List<String[]> out = new ArrayList<String[]>((int) e - 1);
for (int k = 1; k <= items.size(); k++) {
kCombinations(items, 0, k, new String[k], out);
}
return out;
}
private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
if (k == 0) {
out.add(arr.clone());
} else {
for (int i = n; i <= items.size() - k; i++) {
arr[arr.length - k] = items.get(i);
kCombinations(items, i + 1, k - 1, arr, out);
}
}
}
public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = CacheUtil.combinations(
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> {
return facet.name() + "==" + facet.value();
}).collect(Collectors.toList()));
return combinations;
}
public static Object merge(Object to, Object from) {
return to; // TODO(gburd): yeah...
}
public static String schemaName(List<Facet> facets) {
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
.collect(Collectors.joining("."));
}
}
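A small driver showing what combinations (and hence flattenFacets) expand to; the three key strings and the CombinationsDemo class are arbitrary examples, only CacheUtil.combinations from above is exercised.

import java.util.Arrays;
import java.util.List;

import net.helenus.core.cache.CacheUtil;

public class CombinationsDemo {
	public static void main(String[] args) {
		// 2^3 - 1 = 7 non-empty subsets, in the order the recursion emits them:
		// [a==1], [b==2], [c==3], [a==1, b==2], [a==1, c==3], [b==2, c==3], [a==1, b==2, c==3]
		List<String[]> subsets = CacheUtil.combinations(Arrays.asList("a==1", "b==2", "c==3"));
		subsets.forEach(s -> System.out.println(Arrays.toString(s)));
	}
}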
@ -0,0 +1,53 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
/**
* An Entity is identifiable via one or more Facets
*/
public class Facet<T> {
private final String name;
private T value;
private boolean fixed = false;
public Facet(String name) {
this.name = name;
}
public Facet(String name, T value) {
this.name = name;
this.value = value;
}
public String name() {
return name;
}
public T value() {
return value;
}
public Facet setFixed() {
fixed = true;
return this;
}
public boolean fixed() {
return fixed;
}
}
@ -0,0 +1,74 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.helenus.core.SchemaUtil;
import net.helenus.mapping.HelenusProperty;
public class UnboundFacet extends Facet<String> {
private final List<HelenusProperty> properties;
public UnboundFacet(List<HelenusProperty> properties) {
super(SchemaUtil.createPrimaryKeyPhrase(properties));
this.properties = properties;
}
public UnboundFacet(HelenusProperty property) {
super(property.getPropertyName());
properties = new ArrayList<HelenusProperty>();
properties.add(property);
}
public List<HelenusProperty> getProperties() {
return properties;
}
public Binder binder() {
return new Binder(name(), properties);
}
public static class Binder {
private final String name;
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
Binder(String name, List<HelenusProperty> properties) {
this.name = name;
this.properties.addAll(properties);
}
public Binder setValueForProperty(HelenusProperty prop, Object value) {
properties.remove(prop);
boundProperties.put(prop, value);
return this;
}
public boolean isBound() {
return properties.isEmpty();
}
public BoundFacet bind() {
return new BoundFacet(name, boundProperties);
}
}
}
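A sketch of the intended flow from UnboundFacet to BoundFacet: the facet names the columns, the Binder collects concrete values, and bind() yields the BoundFacet used as a cache-key component. The helper below is hypothetical; the HelenusProperty would normally come from entity metadata.

	static BoundFacet bindFacet(HelenusProperty prop, Object value) {
		UnboundFacet facet = new UnboundFacet(prop);
		UnboundFacet.Binder binder = facet.binder();
		binder.setValueForProperty(prop, value); // removes prop from the unbound set
		return binder.isBound() ? binder.bind() : null; // bound only once every property has a value
	}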
@@ -17,92 +17,94 @@ package net.helenus.core.operation;

import java.util.LinkedList;
import java.util.List;

import net.helenus.core.*;

public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
    extends AbstractOperation<E, O> {

  protected List<Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedList<Filter<?>>();
    }
    filters.add(filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}
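
For orientation, a hedged sketch of the fluent API this class provides (the concrete operation countWidgets, the widget getters, and the Operator.EQ constant are assumptions, not shown in this hunk): where()/and() accumulate regular filters, while onlyIf() accumulates lightweight-transaction conditions.

// Sketch only; names are hypothetical.
long n = countWidgets
    .where(widget::accountId, Operator.EQ, accountId)
    .and(widget::enabled, Operator.EQ, true)
    .sync();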

View file

@@ -19,94 +19,95 @@ import java.util.LinkedHashMap;

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;

public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
    extends AbstractOptionalOperation<E, O> {

  protected Map<HelenusProperty, Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterOptionalOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
    }
    filters.put(filter.getNode().getProperty(), filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}

View file

@@ -19,94 +19,95 @@ import java.util.LinkedHashMap;

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;

public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
    extends AbstractStreamOperation<E, O> {

  protected Map<HelenusProperty, Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterStreamOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
    }
    filters.put(filter.getNode().getProperty(), filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}

View file

@@ -15,58 +15,75 @@
 */
package net.helenus.core.operation;

import java.util.concurrent.CompletableFuture;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;

public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {

  public AbstractOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract E transform(ResultSet resultSet);

  public boolean cacheable() {
    return false;
  }

  public PreparedOperation<E> prepare() {
    return new PreparedOperation<E>(prepareStatement(), this);
  }

  public E sync() { // throws TimeoutException
    final Timer.Context context = requestLatency.time();
    try {
      ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
          showValues, false);
      return transform(resultSet);
    } finally {
      context.stop();
    }
  }

  public E sync(UnitOfWork uow) { // throws TimeoutException
    if (uow == null)
      return sync();

    final Timer.Context context = requestLatency.time();
    try {
      ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
          showValues, true);
      E result = transform(resultSet);
      return result;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<E> async() {
    return CompletableFuture.<E>supplyAsync(() -> {
      // try {
      return sync();
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }

  public CompletableFuture<E> async(UnitOfWork uow) {
    if (uow == null)
      return async();
    return CompletableFuture.<E>supplyAsync(() -> {
      // try {
      return sync(uow);
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }
}
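
A quick usage sketch of the execution entry points above (the concrete operation op, the Widget result type, and the open uow are assumptions, not part of this hunk): sync(uow) runs blocking inside a unit of work, while async() simply wraps sync() in a CompletableFuture.

// Sketch only; names are hypothetical.
Widget w = op.sync(uow);                      // blocking, participates in the unit-of-work cache
CompletableFuture<Widget> fw = op.async();    // non-blocking, no unit of work
fw.thenAccept(widget -> System.out.println("loaded " + widget));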

View file

@@ -15,101 +15,158 @@
 */
package net.helenus.core.operation;

import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
    extends AbstractStatementOperation<E, O> {

  public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract Optional<E> transform(ResultSet resultSet);

  public PreparedOptionalOperation<E> prepare() {
    return new PreparedOptionalOperation<E>(prepareStatement(), this);
  }

  public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
    final O _this = (O) this;
    return Futures.transform(prepareStatementAsync(),
        new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
          @Override
          public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
            return new PreparedOptionalOperation<E>(preparedStatement, _this);
          }
        });
  }

  public Optional<E> sync() { // throws TimeoutException
    final Timer.Context context = requestLatency.time();
    try {
      Optional<E> result = Optional.empty();
      E cacheResult = null;
      boolean updateCache = isSessionCacheable();

      if (enableCache && isSessionCacheable()) {
        List<Facet> facets = bindFacetValues();
        String tableName = CacheUtil.schemaName(facets);
        cacheResult = (E) sessionOps.checkCache(tableName, facets);
        if (cacheResult != null) {
          result = Optional.of(cacheResult);
          updateCache = false;
        }
      }

      if (!result.isPresent()) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
            queryTimeoutUnits, showValues, false);

        // Transform the query result set into the desired shape.
        result = transform(resultSet);
      }

      if (updateCache && result.isPresent()) {
        List<Facet> facets = getFacets();
        if (facets != null && facets.size() > 1) {
          sessionOps.updateCache(result.get(), facets);
        }
      }
      return result;
    } finally {
      context.stop();
    }
  }

  public Optional<E> sync(UnitOfWork<?> uow) { // throws TimeoutException
    if (uow == null)
      return sync();

    final Timer.Context context = requestLatency.time();
    try {
      Optional<E> result = Optional.empty();
      E cacheResult = null;
      boolean updateCache = true;

      if (enableCache) {
        Stopwatch timer = uow.getCacheLookupTimer();
        timer.start();
        List<Facet> facets = bindFacetValues();
        cacheResult = checkCache(uow, facets);
        if (cacheResult != null) {
          result = Optional.of(cacheResult);
          updateCache = false;
        } else {
          if (isSessionCacheable()) {
            String tableName = CacheUtil.schemaName(facets);
            cacheResult = (E) sessionOps.checkCache(tableName, facets);
            if (cacheResult != null) {
              result = Optional.of(cacheResult);
            }
          }
        }
        timer.stop();
      }

      if (!result.isPresent()) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
            showValues, true);

        // Transform the query result set into the desired shape.
        result = transform(resultSet);
      }

      // If we have a result, it wasn't from the UOW cache, and we're caching things
      // then we need to put this result into the cache for future requests to find.
      if (updateCache && result.isPresent()) {
        updateCache(uow, result.get(), getFacets());
      }

      return result;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<Optional<E>> async() {
    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
      // try {
      return sync();
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }

  public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
    if (uow == null)
      return async();
    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
      // try {
      return sync(uow);
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }
}
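
To make the lookup order in sync(uow) above concrete (the findWidget operation, Widget type, and uow are assumed names): the operation consults the unit-of-work cache by facets first, falls back to the session cache when the operation is session-cacheable, and only then round-trips to Cassandra; a hit short-circuits the query and skips the cache update.

// Sketch only; names are hypothetical.
Optional<Widget> maybe = findWidget.sync(uow); // UOW cache -> session cache -> Cassandra
maybe.ifPresent(widget -> System.out.println("found " + widget));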

View file

@@ -15,8 +15,15 @@
 */
package net.helenus.core.operation;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
@@ -27,268 +34,335 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;

import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException;

public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractStatementOperation.class);

  protected boolean enableCache = true;
  protected boolean showValues = true;
  protected TraceContext traceContext;
  long queryExecutionTimeout = 10;
  TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
  private ConsistencyLevel consistencyLevel;
  private ConsistencyLevel serialConsistencyLevel;
  private RetryPolicy retryPolicy;
  private boolean idempotent = false;
  private boolean enableTracing = false;
  private long[] defaultTimestamp = null;
  private int[] fetchSize = null;

  public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
    this.consistencyLevel = sessionOperations.getDefaultConsistencyLevel();
    this.idempotent = sessionOperations.getDefaultQueryIdempotency();
  }

  public abstract Statement buildStatement(boolean cached);

  public O ignoreCache(boolean enabled) {
    enableCache = enabled;
    return (O) this;
  }

  public O ignoreCache() {
    enableCache = true;
    return (O) this;
  }

  public O showValues(boolean enabled) {
    this.showValues = enabled;
    return (O) this;
  }

  public O defaultTimestamp(long timestamp) {
    this.defaultTimestamp = new long[1];
    this.defaultTimestamp[0] = timestamp;
    return (O) this;
  }

  public O retryPolicy(RetryPolicy retryPolicy) {
    this.retryPolicy = retryPolicy;
    return (O) this;
  }

  public O defaultRetryPolicy() {
    this.retryPolicy = DefaultRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O idempotent() {
    this.idempotent = true;
    return (O) this;
  }

  public O isIdempotent(boolean idempotent) {
    this.idempotent = idempotent;
    return (O) this;
  }

  public O downgradingConsistencyRetryPolicy() {
    this.retryPolicy = DowngradingConsistencyRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O fallthroughRetryPolicy() {
    this.retryPolicy = FallthroughRetryPolicy.INSTANCE;
    return (O) this;
  }

  public O consistency(ConsistencyLevel level) {
    this.consistencyLevel = level;
    return (O) this;
  }

  public O consistencyAny() {
    this.consistencyLevel = ConsistencyLevel.ANY;
    return (O) this;
  }

  public O consistencyOne() {
    this.consistencyLevel = ConsistencyLevel.ONE;
    return (O) this;
  }

  public O consistencyQuorum() {
    this.consistencyLevel = ConsistencyLevel.QUORUM;
    return (O) this;
  }

  public O consistencyAll() {
    this.consistencyLevel = ConsistencyLevel.ALL;
    return (O) this;
  }

  public O consistencyLocalOne() {
    this.consistencyLevel = ConsistencyLevel.LOCAL_ONE;
    return (O) this;
  }

  public O consistencyLocalQuorum() {
    this.consistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
    return (O) this;
  }

  public O consistencyEachQuorum() {
    this.consistencyLevel = ConsistencyLevel.EACH_QUORUM;
    return (O) this;
  }

  public O serialConsistency(ConsistencyLevel level) {
    this.serialConsistencyLevel = level;
    return (O) this;
  }

  public O serialConsistencyAny() {
    this.serialConsistencyLevel = ConsistencyLevel.ANY;
    return (O) this;
  }

  public O serialConsistencyOne() {
    this.serialConsistencyLevel = ConsistencyLevel.ONE;
    return (O) this;
  }

  public O serialConsistencyQuorum() {
    this.serialConsistencyLevel = ConsistencyLevel.QUORUM;
    return (O) this;
  }

  public O serialConsistencyAll() {
    this.serialConsistencyLevel = ConsistencyLevel.ALL;
    return (O) this;
  }

  public O serialConsistencyLocal() {
    this.serialConsistencyLevel = ConsistencyLevel.LOCAL_SERIAL;
    return (O) this;
  }

  public O serialConsistencyLocalQuorum() {
    this.serialConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
    return (O) this;
  }

  public O disableTracing() {
    this.enableTracing = false;
    return (O) this;
  }

  public O enableTracing() {
    this.enableTracing = true;
    return (O) this;
  }

  public O tracing(boolean enable) {
    this.enableTracing = enable;
    return (O) this;
  }

  public O fetchSize(int fetchSize) {
    this.fetchSize = new int[1];
    this.fetchSize[0] = fetchSize;
    return (O) this;
  }

  public O queryTimeoutMs(long ms) {
    this.queryExecutionTimeout = ms;
    this.queryTimeoutUnits = TimeUnit.MILLISECONDS;
    return (O) this;
  }

  public O queryTimeout(long timeout, TimeUnit units) {
    this.queryExecutionTimeout = timeout;
    this.queryTimeoutUnits = units;
    return (O) this;
  }

  public Statement options(Statement statement) {

    if (defaultTimestamp != null) {
      statement.setDefaultTimestamp(defaultTimestamp[0]);
    }

    if (consistencyLevel != null) {
      statement.setConsistencyLevel(consistencyLevel);
    }

    if (serialConsistencyLevel != null) {
      statement.setSerialConsistencyLevel(serialConsistencyLevel);
    }

    if (retryPolicy != null) {
      statement.setRetryPolicy(retryPolicy);
    }

    if (enableTracing) {
      statement.enableTracing();
    } else {
      statement.disableTracing();
    }

    if (fetchSize != null) {
      statement.setFetchSize(fetchSize[0]);
    }

    if (idempotent) {
      statement.setIdempotent(true);
    }

    return statement;
  }

  public O zipkinContext(TraceContext traceContext) {
    if (traceContext != null) {
      Tracer tracer = this.sessionOps.getZipkinTracer();
      if (tracer != null) {
        this.traceContext = traceContext;
      }
    }
    return (O) this;
  }

  public Statement statement() {
    return buildStatement(false);
  }

  public String cql() {
    Statement statement = buildStatement(false);
    if (statement == null)
      return "";
    if (statement instanceof BuiltStatement) {
      BuiltStatement buildStatement = (BuiltStatement) statement;
      return buildStatement.setForceNoValues(true).getQueryString();
    } else {
      return statement.toString();
    }
  }

  public PreparedStatement prepareStatement() {
    Statement statement = buildStatement(true);
    if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      return sessionOps.prepare(regularStatement);
    }
    throw new HelenusException("only RegularStatements can be prepared");
  }

  public ListenableFuture<PreparedStatement> prepareStatementAsync() {
    Statement statement = buildStatement(true);
    if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      return sessionOps.prepareAsync(regularStatement);
    }
    throw new HelenusException("only RegularStatements can be prepared");
  }

  protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
    E result = null;
    Optional<Object> optionalCachedResult = Optional.empty();

    if (!facets.isEmpty()) {
      optionalCachedResult = uow.cacheLookup(facets);
      if (optionalCachedResult.isPresent()) {
        uowCacheHits.mark();
        LOG.info("UnitOfWork({}) cache hit using facets", uow.hashCode());
        result = (E) optionalCachedResult.get();
      }
    }

    if (result == null) {
      uowCacheMiss.mark();
      LOG.info("UnitOfWork({}) cache miss", uow.hashCode());
    }

    return result;
  }

  protected void updateCache(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
    List<Facet> facets = new ArrayList<>();
    Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;

    for (Facet facet : identifyingFacets) {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        unboundFacet.getProperties().forEach(prop -> {
          if (valueMap == null) {
            Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
            binder.setValueForProperty(prop, value.toString());
          } else {
            binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
          }
          facets.add(binder.bind());
        });
      } else {
        facets.add(facet);
      }
    }

    // Cache the value (pojo), the statement key, and the fully bound facets.
    uow.cacheUpdate(pojo, facets);
  }
}
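
Since every setter above returns (O) this, the statement-tuning options chain; a short sketch (the concrete operation op is assumed, every call below is defined in this class):

// Sketch: tuning a statement before execution.
op.consistencyLocalQuorum()
  .defaultRetryPolicy()
  .idempotent()
  .queryTimeoutMs(500)
  .enableTracing();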

View file

@@ -15,93 +15,158 @@
 */
package net.helenus.core.operation;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
    extends AbstractStatementOperation<E, O> {

  public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract Stream<E> transform(ResultSet resultSet);

  public PreparedStreamOperation<E> prepare() {
    return new PreparedStreamOperation<E>(prepareStatement(), this);
  }

  public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
    final O _this = (O) this;
    return Futures.transform(prepareStatementAsync(),
        new Function<PreparedStatement, PreparedStreamOperation<E>>() {
          @Override
          public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
            return new PreparedStreamOperation<E>(preparedStatement, _this);
          }
        });
  }

  public Stream<E> sync() { // throws TimeoutException
    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cacheResult = null;
      boolean updateCache = isSessionCacheable();

      if (enableCache && isSessionCacheable()) {
        List<Facet> facets = bindFacetValues();
        String tableName = CacheUtil.schemaName(facets);
        cacheResult = (E) sessionOps.checkCache(tableName, facets);
        if (cacheResult != null) {
          resultStream = Stream.of(cacheResult);
          updateCache = false;
        }
      }

      if (resultStream == null) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
            queryTimeoutUnits, showValues, false);

        // Transform the query result set into the desired shape.
        resultStream = transform(resultSet);
      }

      if (updateCache && resultStream != null) {
        List<Facet> facets = getFacets();
        if (facets != null && facets.size() > 1) {
          List<E> again = new ArrayList<>();
          resultStream.forEach(result -> {
            sessionOps.updateCache(result, facets);
            again.add(result);
          });
          resultStream = again.stream();
        }
      }

      return resultStream;
    } finally {
      context.stop();
    }
  }

  public Stream<E> sync(UnitOfWork<?> uow) { // throws TimeoutException
    if (uow == null)
      return sync();

    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cachedResult = null;
      boolean updateCache = true;

      if (enableCache) {
        Stopwatch timer = uow.getCacheLookupTimer();
        timer.start();
        List<Facet> facets = bindFacetValues();
        cachedResult = checkCache(uow, facets);
        if (cachedResult != null) {
          resultStream = Stream.of(cachedResult);
          updateCache = false;
        }
        timer.stop();
      }

      if (resultStream == null) {
        ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
            showValues, true);
        resultStream = transform(resultSet);
      }

      // If we have a result and we're caching then we need to put it into the cache
      // for future requests to find.
      if (updateCache && resultStream != null) {
        List<E> again = new ArrayList<>();
        List<Facet> facets = getFacets();
        resultStream.forEach(result -> {
          updateCache(uow, result, facets);
          again.add(result);
        });
        resultStream = again.stream();
      }

      return resultStream;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<Stream<E>> async() {
    return CompletableFuture.<Stream<E>>supplyAsync(() -> {
      // try {
      return sync();
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }

  public CompletableFuture<Stream<E>> async(UnitOfWork<?> uow) {
    if (uow == null)
      return async();
    return CompletableFuture.<Stream<E>>supplyAsync(() -> {
      // try {
      return sync(uow);
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }
}
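
One subtlety in the caching path above: a java.util.stream.Stream can be consumed only once, so after pushing each element into the cache the code collects the elements and returns a fresh stream. A minimal, self-contained JDK illustration of that re-materialization trick (no Helenus types involved):

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

public class StreamReuseSketch {
  public static void main(String[] args) {
    Stream<String> results = Stream.of("a", "b", "c");

    // Consume the stream for a side effect (here, pretend-caching), remembering each element.
    List<String> again = new ArrayList<>();
    results.forEach(r -> {
      // cache.put(r) would happen here
      again.add(r);
    });

    // The original stream is spent; hand back an equivalent one built from the collected list.
    Stream<String> replay = again.stream();
    replay.forEach(System.out::println);
  }
}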

View file

@@ -21,22 +21,27 @@ import com.datastax.driver.core.Statement;

public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractOperation<E, ?> delegate;

  public BoundOperation(BoundStatement boundStatement, AbstractOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public E transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

View file

@@ -15,31 +15,35 @@
 */
package net.helenus.core.operation;

import java.util.Optional;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;

public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractOptionalOperation<E, ?> delegate;

  public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public Optional<E> transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

View file

@@ -15,36 +15,43 @@
 */
package net.helenus.core.operation;

import java.util.List;
import java.util.stream.Stream;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;

import net.helenus.core.cache.Facet;

public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> {

  private final BoundStatement boundStatement;
  private final AbstractStreamOperation<E, ?> delegate;

  public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
    super(operation.sessionOps);
    this.boundStatement = boundStatement;
    this.delegate = operation;
  }

  @Override
  public List<Facet> bindFacetValues() {
    return delegate.bindFacetValues();
  }

  @Override
  public Stream<E> transform(ResultSet resultSet) {
    return delegate.transform(resultSet);
  }

  @Override
  public Statement buildStatement(boolean cached) {
    return boundStatement;
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }
}

View file

@@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
@@ -28,56 +29,53 @@ import net.helenus.support.HelenusMappingException;

public final class CountOperation extends AbstractFilterOperation<Long, CountOperation> {

  private HelenusEntity entity;

  public CountOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);
    this.entity = entity;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    if (filters != null && !filters.isEmpty()) {
      filters.forEach(f -> addPropertyNode(f.getNode()));
    }

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    Select select = QueryBuilder.select().countAll().from(entity.getName().toCql());

    if (filters != null && !filters.isEmpty()) {
      Where where = select.where();
      for (Filter<?> filter : filters) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
      }
    }

    return select;
  }

  @Override
  public Long transform(ResultSet resultSet) {
    return resultSet.one().getLong(0);
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can count columns only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
    }
  }
}

View file

@@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
@@ -28,100 +29,97 @@ import net.helenus.support.HelenusMappingException;

public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {

  private HelenusEntity entity;
  private boolean ifExists = false;

  private int[] ttl;
  private long[] timestamp;

  public DeleteOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public DeleteOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
    super(sessionOperations);
    this.entity = entity;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    if (filters != null && !filters.isEmpty()) {
      filters.forEach(f -> addPropertyNode(f.getNode()));
    }

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    if (filters != null && !filters.isEmpty()) {
      Delete delete = QueryBuilder.delete().from(entity.getName().toCql());

      if (this.ifExists) {
        delete.ifExists();
      }

      Where where = delete.where();

      for (Filter<?> filter : filters) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
      }

      if (ifFilters != null && !ifFilters.isEmpty()) {
        for (Filter<?> filter : ifFilters) {
          delete.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
        }
      }

      if (this.ttl != null) {
        delete.using(QueryBuilder.ttl(this.ttl[0]));
      }
      if (this.timestamp != null) {
        delete.using(QueryBuilder.timestamp(this.timestamp[0]));
      }

      return delete;
    } else {
      return QueryBuilder.truncate(entity.getName().toCql());
    }
  }

  @Override
  public ResultSet transform(ResultSet resultSet) {
    return resultSet;
  }

  public DeleteOperation ifExists() {
    this.ifExists = true;
    return this;
  }

  public DeleteOperation usingTtl(int ttl) {
    this.ttl = new int[1];
    this.ttl[0] = ttl;
    return this;
  }

  public DeleteOperation usingTimestamp(long timestamp) {
    this.timestamp = new long[1];
    this.timestamp[0] = timestamp;
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can delete rows only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
    }
  }
}
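
A small sketch of the two shapes buildStatement() above can take (the DeleteOperation instances, the widget getter, and Operator.EQ are assumed names): with filters it renders a conditional DELETE, without filters it falls back to truncating the table.

// Sketch only; names are hypothetical.
String cql1 = deleteWidget.where(widget::id, Operator.EQ, id).usingTimestamp(ts).cql();
// roughly: DELETE FROM widget USING TIMESTAMP ... WHERE id = ...
String cql2 = deleteAllWidgets.cql();
// roughly: TRUNCATE widget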

View file

@@ -15,18 +15,20 @@
 */
package net.helenus.core.operation;

import java.util.*;
import java.util.function.Function;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.google.common.base.Joiner;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
@@ -38,230 +40,210 @@ import net.helenus.support.HelenusMappingException;

public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {

  private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
  private final T pojo;
  private final Class<?> resultType;
  private HelenusEntity entity;
  private boolean ifNotExists;

  private int[] ttl;
  private long[] timestamp;

  public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.resultType = ResultSet.class;
  }

  public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.resultType = resultType;
  }

  public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
      Set<String> mutations, boolean ifNotExists) {
    super(sessionOperations);

    this.entity = entity;
    this.pojo = pojo;
    this.ifNotExists = ifNotExists;
    this.resultType = entity.getMappingInterface();

    Collection<HelenusProperty> properties = entity.getOrderedProperties();
    Set<String> keys = (mutations == null) ? null : mutations;

    for (HelenusProperty prop : properties) {
      boolean addProp = false;

      switch (prop.getColumnType()) {
        case PARTITION_KEY:
        case CLUSTERING_COLUMN:
          addProp = true;
          break;
        default:
          addProp = (keys == null || keys.contains(prop.getPropertyName()));
      }

      if (addProp) {
        Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
        value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);

        if (value != null) {
          HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
          values.add(Fun.Tuple2.of(node, value));
        }
      }
    }
  }

  public InsertOperation<T> ifNotExists() {
    this.ifNotExists = true;
    return this;
  }

  public InsertOperation<T> ifNotExists(boolean enable) {
    this.ifNotExists = enable;
    return this;
  }

  public <V> InsertOperation<T> value(Getter<V> getter, V val) {
    Objects.requireNonNull(getter, "getter is empty");

    if (val != null) {
      HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
      Object value = sessionOps.getValuePreparer().prepareColumnValue(val, node.getProperty());

      if (value != null) {
        values.add(Fun.Tuple2.of(node, value));
      }
    }

    return this;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    values.forEach(t -> addPropertyNode(t._1));

    if (values.isEmpty())
      return null;

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");
    }

    Insert insert = QueryBuilder.insertInto(entity.getName().toCql());

    if (ifNotExists) {
      insert.ifNotExists();
    }

    values.forEach(t -> {
      insert.value(t._1.getColumnName(), t._2);
    });

    if (this.ttl != null) {
      insert.using(QueryBuilder.ttl(this.ttl[0]));
    }
    if (this.timestamp != null) {
      insert.using(QueryBuilder.timestamp(this.timestamp[0]));
    }

    return insert;
  }

  @Override
  public T transform(ResultSet resultSet) {
    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
      if (values.size() > 0) {
        boolean immutable = iface.isAssignableFrom(Drafted.class);
        Collection<HelenusProperty> properties = entity.getOrderedProperties();
        Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());

        // First, add all the inserted values into our new map.
public T transform(ResultSet resultSet) { values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
if (values.size() > 0) {
Collection<HelenusProperty> properties = entity.getOrderedProperties();
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
// First, add all the inserted values into our new map. // Then, fill in all the rest of the properties.
values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2)); for (HelenusProperty prop : properties) {
String key = prop.getPropertyName();
if (backingMap.containsKey(key)) {
// Some values may need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter = prop
.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key)));
}
} else {
// If we started this operation with an instance of this type, use values from
// that.
if (pojo != null) {
backingMap.put(key,
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
} else {
// Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType();
if (propType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
if (type == null) {
throw new HelenusException("unknown primitive type " + propType);
}
backingMap.put(key, type.getDefaultValue());
}
}
}
}
// Then, fill in all the rest of the properties. // Lastly, create a new proxy object for the entity and return the new instance.
for (HelenusProperty prop : properties) { return (T) Helenus.map(iface, backingMap);
String key = prop.getPropertyName(); }
if (backingMap.containsKey(key)) { // Oddly, this insert didn't change any value so simply return the pojo.
// Some values may need to be converted (e.g. from String to Enum). This is done // TODO(gburd): this pojo is the result of a Draft.build() call which will not
// within the BeanColumnValueProvider below. // preserve object identity (o1 == o2), ... fix me.
Optional<Function<Object, Object>> converter = return (T) pojo;
prop.getReadConverter(sessionOps.getSessionRepository()); }
if (converter.isPresent()) { return (T) resultSet;
backingMap.put(key, converter.get().apply(backingMap.get(key))); }
}
} else {
// If we started this operation with an instance of this type, use values from that.
if (pojo != null) {
backingMap.put(key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
} else {
// Otherwise we'll use default values for the property type if available.
Class<?> propType = prop.getJavaType();
if (propType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
if (type == null) {
throw new HelenusException("unknown primitive type " + propType);
}
backingMap.put(key, type.getDefaultValue());
}
}
}
}
// Lastly, create a new proxy object for the entity and return the new instance. public InsertOperation<T> usingTtl(int ttl) {
return (T) Helenus.map(iface, backingMap); this.ttl = new int[1];
} this.ttl[0] = ttl;
// Oddly, this insert didn't change any value so simply return the pojo. return this;
// TODO(gburd): this pojo is the result of a Draft.build() call which will not preserve object identity (o1 == o2), ... fix me. }
return (T) pojo;
}
return (T) resultSet;
}
public InsertOperation<T> usingTtl(int ttl) { public InsertOperation<T> usingTimestamp(long timestamp) {
this.ttl = new int[1]; this.timestamp = new long[1];
this.ttl[0] = ttl; this.timestamp[0] = timestamp;
return this; return this;
} }
public InsertOperation<T> usingTimestamp(long timestamp) { private void addPropertyNode(HelenusPropertyNode p) {
this.timestamp = new long[1]; if (entity == null) {
this.timestamp[0] = timestamp; entity = p.getEntity();
return this; } else if (entity != p.getEntity()) {
} throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
+ " or " + p.getEntity().getMappingInterface());
}
}
private void addPropertyNode(HelenusPropertyNode p) { @Override
if (entity == null) { public T sync(UnitOfWork uow) {// throws TimeoutException {
entity = p.getEntity(); if (uow == null) {
} else if (entity != p.getEntity()) { return sync();
throw new HelenusMappingException( }
"you can insert only single entity " T result = super.sync(uow);
+ entity.getMappingInterface() Class<?> iface = entity.getMappingInterface();
+ " or " if (resultType == iface) {
+ p.getEntity().getMappingInterface()); updateCache(uow, result, entity.getFacets());
} }
} return result;
}
@Override
public String getStatementCacheKey() {
List<String> keys = new ArrayList<>(values.size());
values.forEach(
t -> {
HelenusPropertyNode prop = t._1;
switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
keys.add(prop.getColumnName() + "==" + t._2.toString());
break;
default:
break;
}
});
return entity.getName() + ": " + Joiner.on(",").join(keys);
}
@Override
public T sync(UnitOfWork uow) {
if (uow == null) {
return sync();
}
T result = super.sync(uow);
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
String key = getStatementCacheKey();
if (key != null) {
Set<Object> set = new HashSet<Object>(1);
set.add(result);
uow.getCache().put(key, set);
}
}
return result;
}
} }
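For orientation, here is a minimal usage sketch of the builder methods above (value, ifNotExists, usingTtl, usingTimestamp, sync). The Person interface, its DSL proxy, and the assumption that method references satisfy Getter<V> are illustrative; this is not the project's documented entry point.

import com.datastax.driver.core.ResultSet;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.operation.InsertOperation;

class InsertSketch {
	// Hypothetical mapped interface; in practice this would be a Helenus @Table entity.
	interface Person {
		String name();
		Integer age();
	}

	// `person` is assumed to be the Helenus DSL proxy for Person, so a method reference
	// like person::name can stand in for the Getter<V> expected by value().
	static ResultSet insertPerson(AbstractSessionOperations sessionOps, UnitOfWork uow, Person person) {
		InsertOperation<ResultSet> op = new InsertOperation<>(sessionOps, /* ifNotExists */ true);
		op.value(person::name, "Alice")                        // getter resolved to its column, value run through the ValuePreparer
			.value(person::age, 42)
			.usingTtl(86_400)                                    // rendered as QueryBuilder.ttl(...)
			.usingTimestamp(System.currentTimeMillis() * 1000L); // rendered as QueryBuilder.timestamp(...)
		return op.sync(uow);                                     // executes inside the unit of work when one is supplied
	}
}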
@@ -1,79 +1,111 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.operation; package net.helenus.core.operation;
import brave.Span; import java.util.List;
import brave.Tracer; import java.util.concurrent.TimeUnit;
import brave.propagation.TraceContext;
import com.codahale.metrics.Meter; import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture; import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement; import com.datastax.driver.core.Statement;
import java.util.concurrent.ExecutionException; import com.google.common.base.Stopwatch;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
public abstract class Operation<E> { public abstract class Operation<E> {
protected final AbstractSessionOperations sessionOps; protected final AbstractSessionOperations sessionOps;
protected final Meter uowCacheHits; protected final Meter uowCacheHits;
protected final Meter uowCacheMiss; protected final Meter uowCacheMiss;
protected final Timer requestLatency; protected final Timer requestLatency;
Operation(AbstractSessionOperations sessionOperations) { Operation(AbstractSessionOperations sessionOperations) {
this.sessionOps = sessionOperations; this.sessionOps = sessionOperations;
MetricRegistry metrics = sessionOperations.getMetricRegistry(); MetricRegistry metrics = sessionOperations.getMetricRegistry();
this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits"); this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss"); this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
this.requestLatency = metrics.timer("net.helenus.request-latency"); this.requestLatency = metrics.timer("net.helenus.request-latency");
} }
public ResultSet execute( public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
AbstractSessionOperations session, TimeUnit units, boolean showValues, boolean cached) { // throws TimeoutException {
UnitOfWork uow,
TraceContext traceContext,
boolean showValues,
boolean cached) {
// Start recording in a Zipkin sub-span our execution time to perform this operation. // Start recording in a Zipkin sub-span our execution time to perform this
Tracer tracer = session.getZipkinTracer(); // operation.
Span span = null; Tracer tracer = session.getZipkinTracer();
if (tracer != null && traceContext != null) { Span span = null;
span = tracer.newChild(traceContext); if (tracer != null && traceContext != null) {
} span = tracer.newChild(traceContext);
}
try { try {
if (span != null) { if (span != null) {
span.name("cassandra"); span.name("cassandra");
span.start(); span.start();
} }
Statement statement = options(buildStatement(cached)); Statement statement = options(buildStatement(cached));
ResultSetFuture futureResultSet = session.executeAsync(statement, showValues); Stopwatch timer = null;
return futureResultSet.get(); if (uow != null) {
timer = uow.getExecutionTimer();
timer.start();
}
ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
ResultSet resultSet = futureResultSet.getUninterruptibly(); // TODO(gburd): (timeout, units);
} catch (InterruptedException | ExecutionException e) { if (uow != null)
timer.stop();
throw new RuntimeException(e); return resultSet;
} finally { } finally {
if (span != null) { if (span != null) {
span.finish(); span.finish();
} }
} }
} }
public Statement options(Statement statement) { public Statement options(Statement statement) {
return statement; return statement;
} }
public Statement buildStatement(boolean cached) { public Statement buildStatement(boolean cached) {
return null; return null;
} }
public List<Facet> getFacets() {
return null;
}
public List<Facet> bindFacetValues() {
return null;
}
public boolean isSessionCacheable() {
return false;
}
public String getStatementCacheKey() {
return null;
}
} }
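The execute(...) flow above brackets the driver call with an optional Zipkin child span and, when a UnitOfWork is present, a Guava Stopwatch. A distilled sketch of that wrapper pattern follows; the generic Supplier stands in for futureResultSet.getUninterruptibly() and is an assumption of this example, not part of the class.

import java.util.function.Supplier;

import com.google.common.base.Stopwatch;

import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;

class TracedCallSketch {
	// Sketch of the span/stopwatch bracketing used by Operation.execute().
	static <T> T traced(Tracer tracer, TraceContext ctx, Stopwatch timer, Supplier<T> call) {
		Span span = (tracer != null && ctx != null) ? tracer.newChild(ctx) : null;
		try {
			if (span != null) {
				span.name("cassandra");
				span.start();
			}
			if (timer != null) {
				timer.start();              // uow.getExecutionTimer() in the real code
			}
			T result = call.get();
			if (timer != null) {
				timer.stop();
			}
			return result;
		} finally {
			if (span != null) {
				span.finish();              // always close the span, even on failure
			}
		}
	}
}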
@@ -20,27 +20,28 @@ import com.datastax.driver.core.PreparedStatement;
public final class PreparedOperation<E> { public final class PreparedOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractOperation<E, ?> operation; private final AbstractOperation<E, ?> operation;
public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) { public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) {
this.preparedStatement = statement; this.preparedStatement = statement;
this.operation = operation; this.operation = operation;
} }
public PreparedStatement getPreparedStatement() { public PreparedStatement getPreparedStatement() {
return preparedStatement; return preparedStatement;
} }
public BoundOperation<E> bind(Object... params) { public BoundOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params); BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundOperation<E>(boundStatement, operation); return new BoundOperation<E>(boundStatement, operation);
} }
@Override
public String toString() {
return preparedStatement.getQueryString();
}
@Override
public String toString() {
return preparedStatement.getQueryString();
}
} }
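Once a PreparedOperation is in hand (how it gets prepared is outside this hunk), bind() simply delegates to the driver's PreparedStatement.bind and wraps the result. A small hedged sketch, assuming the import paths of the operation classes:

import net.helenus.core.operation.BoundOperation;
import net.helenus.core.operation.PreparedOperation;

class PreparedSketch {
	// Sketch only: `prepared` is assumed to have been produced elsewhere; this just
	// exercises the bind/toString surface of the class above.
	static <E> BoundOperation<E> bindAndLog(PreparedOperation<E> prepared, Object... params) {
		System.out.println(prepared);  // toString() returns the underlying CQL query string
		return prepared.bind(params);  // wraps PreparedStatement.bind(params) in a BoundOperation
	}
}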
@@ -20,28 +20,27 @@ import com.datastax.driver.core.PreparedStatement;
public final class PreparedOptionalOperation<E> { public final class PreparedOptionalOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractOptionalOperation<E, ?> operation; private final AbstractOptionalOperation<E, ?> operation;
public PreparedOptionalOperation( public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) { this.preparedStatement = statement;
this.preparedStatement = statement; this.operation = operation;
this.operation = operation; }
}
public PreparedStatement getPreparedStatement() { public PreparedStatement getPreparedStatement() {
return preparedStatement; return preparedStatement;
} }
public BoundOptionalOperation<E> bind(Object... params) { public BoundOptionalOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params); BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundOptionalOperation<E>(boundStatement, operation); return new BoundOptionalOperation<E>(boundStatement, operation);
} }
@Override @Override
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }
@@ -20,26 +20,25 @@ import com.datastax.driver.core.PreparedStatement;
public final class PreparedStreamOperation<E> { public final class PreparedStreamOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractStreamOperation<E, ?> operation; private final AbstractStreamOperation<E, ?> operation;
public PreparedStreamOperation( public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) { this.preparedStatement = statement;
this.preparedStatement = statement; this.operation = operation;
this.operation = operation; }
}
public PreparedStatement getPreparedStatement() { public PreparedStatement getPreparedStatement() {
return preparedStatement; return preparedStatement;
} }
public BoundStreamOperation<E> bind(Object... params) { public BoundStreamOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params); BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundStreamOperation<E>(boundStatement, operation); return new BoundStreamOperation<E>(boundStatement, operation);
} }
@Override @Override
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }
@@ -15,40 +15,53 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet; import java.util.List;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
public final class SelectFirstOperation<E> import com.datastax.driver.core.ResultSet;
extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> { import com.datastax.driver.core.querybuilder.BuiltStatement;
private final SelectOperation<E> delegate; import net.helenus.core.cache.Facet;
public SelectFirstOperation(SelectOperation<E> delegate) { public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
super(delegate.sessionOps);
this.delegate = delegate; private final SelectOperation<E> delegate;
this.filters = delegate.filters;
this.ifFilters = delegate.ifFilters;
}
public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) { public SelectFirstOperation(SelectOperation<E> delegate) {
return new SelectFirstTransformingOperation<R, E>(delegate, fn); super(delegate.sessionOps);
}
@Override this.delegate = delegate;
public String getStatementCacheKey() { this.filters = delegate.filters;
return delegate.getStatementCacheKey(); this.ifFilters = delegate.ifFilters;
} }
@Override public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) {
public BuiltStatement buildStatement(boolean cached) { return new SelectFirstTransformingOperation<R, E>(delegate, fn);
return delegate.buildStatement(cached); }
}
@Override @Override
public Optional<E> transform(ResultSet resultSet) { public BuiltStatement buildStatement(boolean cached) {
return delegate.transform(resultSet).findFirst(); return delegate.buildStatement(cached);
} }
@Override
public List<Facet> getFacets() {
return delegate.getFacets();
}
@Override
public List<Facet> bindFacetValues() {
return delegate.bindFacetValues();
}
@Override
public Optional<E> transform(ResultSet resultSet) {
return delegate.transform(resultSet).findFirst();
}
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
} }
@@ -15,38 +15,48 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet; import java.util.List;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet;
public final class SelectFirstTransformingOperation<R, E> public final class SelectFirstTransformingOperation<R, E>
extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> { extends
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
private final SelectOperation<E> delegate; private final SelectOperation<E> delegate;
private final Function<E, R> fn; private final Function<E, R> fn;
public SelectFirstTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) { public SelectFirstTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
super(delegate.sessionOps); super(delegate.sessionOps);
this.delegate = delegate; this.delegate = delegate;
this.fn = fn; this.fn = fn;
this.filters = delegate.filters; this.filters = delegate.filters;
this.ifFilters = delegate.ifFilters; this.ifFilters = delegate.ifFilters;
} }
@Override @Override
public String getStatementCacheKey() { public List<Facet> bindFacetValues() {
return delegate.getStatementCacheKey(); return delegate.bindFacetValues();
} }
@Override @Override
public BuiltStatement buildStatement(boolean cached) { public BuiltStatement buildStatement(boolean cached) {
return delegate.buildStatement(cached); return delegate.buildStatement(cached);
} }
@Override @Override
public Optional<R> transform(ResultSet resultSet) { public Optional<R> transform(ResultSet resultSet) {
return delegate.transform(resultSet).findFirst().map(fn); return delegate.transform(resultSet).findFirst().map(fn);
} }
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
} }
@@ -15,6 +15,14 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row; import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
@@ -23,13 +31,11 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select; import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection; import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where; import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.MappingUtil; import net.helenus.mapping.MappingUtil;
@@ -41,283 +47,278 @@ import net.helenus.support.HelenusMappingException;
public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> { public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
protected Function<Row, E> rowMapper = null; private static final Logger LOG = LoggerFactory.getLogger(SelectOperation.class);
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
protected List<Ordering> ordering = null; protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
protected Integer limit = null; protected Function<Row, E> rowMapper = null;
protected boolean allowFiltering = false; protected List<Ordering> ordering = null;
protected String alternateTableName = null; protected Integer limit = null;
protected boolean allowFiltering = false;
protected String alternateTableName = null;
protected boolean isCacheable = false;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public SelectOperation(AbstractSessionOperations sessionOperations) { public SelectOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
this.rowMapper = this.rowMapper = new Function<Row, E>() {
new Function<Row, E>() {
@Override @Override
public E apply(Row source) { public E apply(Row source) {
ColumnValueProvider valueProvider = sessionOps.getValueProvider(); ColumnValueProvider valueProvider = sessionOps.getValueProvider();
Object[] arr = new Object[props.size()]; Object[] arr = new Object[props.size()];
int i = 0; int i = 0;
for (HelenusPropertyNode p : props) { for (HelenusPropertyNode p : props) {
Object value = valueProvider.getColumnValue(source, -1, p.getProperty()); Object value = valueProvider.getColumnValue(source, -1, p.getProperty());
arr[i++] = value; arr[i++] = value;
} }
return (E) Fun.ArrayTuple.of(arr); return (E) Fun.ArrayTuple.of(arr);
} }
}; };
} }
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) { public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
super(sessionOperations); super(sessionOperations);
entity entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.getOrderedProperties() .forEach(p -> this.props.add(p));
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
}
public SelectOperation( isCacheable = entity.isCacheable();
AbstractSessionOperations sessionOperations, }
HelenusEntity entity,
Function<Row, E> rowMapper) {
super(sessionOperations); public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
this.rowMapper = rowMapper; Function<Row, E> rowMapper) {
entity super(sessionOperations);
.getOrderedProperties() this.rowMapper = rowMapper;
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
}
public SelectOperation( entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
AbstractSessionOperations sessionOperations, .forEach(p -> this.props.add(p));
Function<Row, E> rowMapper,
HelenusPropertyNode... props) {
super(sessionOperations); isCacheable = entity.isCacheable();
}
this.rowMapper = rowMapper; public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
Collections.addAll(this.props, props); HelenusPropertyNode... props) {
}
public CountOperation count() { super(sessionOperations);
HelenusEntity entity = null; this.rowMapper = rowMapper;
for (HelenusPropertyNode prop : props) { Collections.addAll(this.props, props);
}
if (entity == null) { public CountOperation count() {
entity = prop.getEntity();
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException(
"you can count records only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
}
}
return new CountOperation(sessionOps, entity); HelenusEntity entity = null;
} for (HelenusPropertyNode prop : props) {
public <V extends E> SelectOperation<E> from(Class<V> materializedViewClass) { if (entity == null) {
Objects.requireNonNull(materializedViewClass); entity = prop.getEntity();
HelenusEntity entity = Helenus.entity(materializedViewClass); } else if (entity != prop.getEntity()) {
this.alternateTableName = entity.getName().toCql(); throw new HelenusMappingException("you can count records only from a single entity "
this.allowFiltering = true; + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
return this; }
} }
public SelectOperation<E> from(String alternateTableName) { return new CountOperation(sessionOps, entity);
this.alternateTableName = alternateTableName; }
return this;
}
public SelectFirstOperation<E> single() { public <V extends E> SelectOperation<E> from(Class<V> materializedViewClass) {
limit(1); Objects.requireNonNull(materializedViewClass);
return new SelectFirstOperation<E>(this); HelenusEntity entity = Helenus.entity(materializedViewClass);
} this.alternateTableName = entity.getName().toCql();
this.props.clear();
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
return this;
}
public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) { public SelectFirstOperation<E> single() {
limit(1);
return new SelectFirstOperation<E>(this);
}
Objects.requireNonNull(entityClass, "entityClass is null"); public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) {
HelenusEntity entity = Helenus.entity(entityClass); Objects.requireNonNull(entityClass, "entityClass is null");
this.rowMapper = null; HelenusEntity entity = Helenus.entity(entityClass);
return new SelectTransformingOperation<R, E>( this.rowMapper = null;
this,
(r) -> {
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
return (R) Helenus.map(entityClass, map);
});
}
public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) { return new SelectTransformingOperation<R, E>(this, (r) -> {
return new SelectTransformingOperation<R, E>(this, fn); Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
} return (R) Helenus.map(entityClass, map);
});
}
public SelectOperation<E> column(Getter<?> getter) { public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) {
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter); return new SelectTransformingOperation<R, E>(this, fn);
this.props.add(p); }
return this;
}
public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) { public SelectOperation<E> column(Getter<?> getter) {
getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering()); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
return this; this.props.add(p);
} return this;
}
public SelectOperation<E> orderBy(Ordered ordered) { public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) {
getOrCreateOrdering().add(ordered.getOrdering()); getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering());
return this; return this;
} }
public SelectOperation<E> limit(Integer limit) { public SelectOperation<E> orderBy(Ordered ordered) {
this.limit = limit; getOrCreateOrdering().add(ordered.getOrdering());
return this; return this;
} }
public SelectOperation<E> allowFiltering() { public SelectOperation<E> limit(Integer limit) {
this.allowFiltering = true; this.limit = limit;
return this; return this;
} }
@Override public SelectOperation<E> allowFiltering() {
public String getStatementCacheKey() { this.allowFiltering = true;
List<String> keys = new ArrayList<>(filters.size()); return this;
HelenusEntity entity = props.get(0).getEntity(); }
for (HelenusPropertyNode prop : props) { @Override
switch (prop.getProperty().getColumnType()) { public boolean isSessionCacheable() {
case PARTITION_KEY: return isCacheable;
case CLUSTERING_COLUMN: }
Filter filter = filters.get(prop.getProperty());
if (filter != null) {
keys.add(filter.toString());
} else {
return null;
}
break;
default:
if (keys.size() > 0) {
return entity.getName() + ": " + Joiner.on(",").join(keys);
}
return null;
}
}
return null;
}
@Override @Override
public BuiltStatement buildStatement(boolean cached) { public List<Facet> getFacets() {
HelenusEntity entity = props.get(0).getEntity();
return entity.getFacets();
}
HelenusEntity entity = null; @Override
Selection selection = QueryBuilder.select(); public List<Facet> bindFacetValues() {
HelenusEntity entity = props.get(0).getEntity();
List<Facet> boundFacets = new ArrayList<>();
for (HelenusPropertyNode prop : props) { for (Facet facet : entity.getFacets()) {
String columnName = prop.getColumnName(); if (facet instanceof UnboundFacet) {
selection = selection.column(columnName); UnboundFacet unboundFacet = (UnboundFacet) facet;
UnboundFacet.Binder binder = unboundFacet.binder();
unboundFacet.getProperties().forEach(prop -> {
Filter filter = filters.get(prop);
if (filter != null) {
Object[] postulates = filter.postulateValues();
for (Object p : postulates) {
binder.setValueForProperty(prop, p.toString());
}
}
if (prop.getProperty().caseSensitiveIndex()) { });
allowFiltering = true; if (binder.isBound()) {
} boundFacets.add(binder.bind());
}
} else {
boundFacets.add(facet);
}
}
return boundFacets;
}
if (entity == null) { @Override
entity = prop.getEntity(); public BuiltStatement buildStatement(boolean cached) {
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException(
"you can select columns only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
}
if (cached) { HelenusEntity entity = null;
switch (prop.getProperty().getColumnType()) { Selection selection = QueryBuilder.select();
case PARTITION_KEY:
case CLUSTERING_COLUMN:
break;
default:
if (entity.equals(prop.getEntity())) {
if (prop.getNext().isPresent()) {
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
}
if (!prop.getProperty().getDataType().isCollectionType()) {
selection.writeTime(columnName).as(columnName + "_writeTime");
selection.ttl(columnName).as(columnName + "_ttl");
}
}
break;
}
}
}
if (entity == null) { for (HelenusPropertyNode prop : props) {
throw new HelenusMappingException("no entity or table to select data"); String columnName = prop.getColumnName();
} selection = selection.column(columnName);
String tableName = alternateTableName == null ? entity.getName().toCql() : alternateTableName; if (prop.getProperty().caseSensitiveIndex()) {
Select select = selection.from(tableName); allowFiltering = true;
}
if (ordering != null && !ordering.isEmpty()) { if (entity == null) {
select.orderBy(ordering.toArray(new Ordering[ordering.size()])); entity = prop.getEntity();
} } else if (entity != prop.getEntity()) {
throw new HelenusMappingException("you can select columns only from a single entity "
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
}
if (limit != null) { if (cached) {
select.limit(limit); switch (prop.getProperty().getColumnType()) {
} case PARTITION_KEY :
case CLUSTERING_COLUMN :
break;
default :
if (entity.equals(prop.getEntity())) {
if (prop.getNext().isPresent()) {
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
}
if (!prop.getProperty().getDataType().isCollectionType()) {
selection.writeTime(columnName).as(columnName + "_writeTime");
selection.ttl(columnName).as(columnName + "_ttl");
}
}
break;
}
}
}
if (filters != null && !filters.isEmpty()) { if (entity == null) {
throw new HelenusMappingException("no entity or table to select data");
}
Where where = select.where(); String tableName = alternateTableName == null ? entity.getName().toCql() : alternateTableName;
Select select = selection.from(tableName);
for (Filter<?> filter : filters.values()) { if (ordering != null && !ordering.isEmpty()) {
where.and(filter.getClause(sessionOps.getValuePreparer())); select.orderBy(ordering.toArray(new Ordering[ordering.size()]));
} }
}
if (ifFilters != null && !ifFilters.isEmpty()) { if (limit != null) {
logger.error( select.limit(limit);
"onlyIf conditions " + ifFilters + " would be ignored in the statement " + select); }
}
if (allowFiltering) { if (filters != null && !filters.isEmpty()) {
select.allowFiltering();
}
return select; Where where = select.where();
}
@SuppressWarnings("unchecked") for (Filter<?> filter : filters.values()) {
@Override where.and(filter.getClause(sessionOps.getValuePreparer()));
public Stream<E> transform(ResultSet resultSet) { }
if (rowMapper != null) { }
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
.map(rowMapper);
} else {
return (Stream<E>)
StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
false);
}
}
private List<Ordering> getOrCreateOrdering() { if (ifFilters != null && !ifFilters.isEmpty()) {
if (ordering == null) { LOG.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
ordering = new ArrayList<Ordering>(); }
}
return ordering; if (allowFiltering) {
} select.allowFiltering();
}
return select;
}
@SuppressWarnings("unchecked")
@Override
public Stream<E> transform(ResultSet resultSet) {
if (rowMapper != null) {
return StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
.map(rowMapper);
} else {
return (Stream<E>) StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
}
}
private List<Ordering> getOrCreateOrdering() {
if (ordering == null) {
ordering = new ArrayList<Ordering>();
}
return ordering;
}
} }
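A hedged sketch of chaining the SelectOperation builder methods shown above (orderBy, allowFiltering, single, sync). The Person interface, its DSL proxy, the OrderingDirection import path, and the Optional-returning sync() on the single() wrapper are assumptions for illustration only; age is assumed to be a clustering column.

import java.util.Optional;

import net.helenus.core.operation.SelectOperation;
import net.helenus.mapping.OrderingDirection;

class SelectSketch {
	// Hypothetical mapped interface; `person` is assumed to be its Helenus DSL proxy.
	interface Person {
		String name();
		Integer age();
	}

	static Optional<Person> oldest(SelectOperation<Person> select, Person person) {
		select.orderBy(person::age, OrderingDirection.DESC) // adds a driver Ordering clause
			.allowFiltering();                                // opts into ALLOW FILTERING when needed
		return select.single().sync();                      // single() applies limit(1) and returns the first row, if any
	}
}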
@@ -15,38 +15,48 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet; import java.util.List;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Stream; import java.util.stream.Stream;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import net.helenus.core.cache.Facet;
public final class SelectTransformingOperation<R, E> public final class SelectTransformingOperation<R, E>
extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> { extends
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
private final SelectOperation<E> delegate; private final SelectOperation<E> delegate;
private final Function<E, R> fn; private final Function<E, R> fn;
public SelectTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) { public SelectTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
super(delegate.sessionOps); super(delegate.sessionOps);
this.delegate = delegate; this.delegate = delegate;
this.fn = fn; this.fn = fn;
this.filters = delegate.filters; this.filters = delegate.filters;
this.ifFilters = delegate.ifFilters; this.ifFilters = delegate.ifFilters;
} }
@Override @Override
public String getStatementCacheKey() { public List<Facet> bindFacetValues() {
return delegate.getStatementCacheKey(); return delegate.bindFacetValues();
} }
@Override @Override
public BuiltStatement buildStatement(boolean cached) { public List<Facet> getFacets() {
return delegate.buildStatement(cached); return delegate.getFacets();
} }
@Override @Override
public Stream<R> transform(ResultSet resultSet) { public BuiltStatement buildStatement(boolean cached) {
return delegate.transform(resultSet).map(fn); return delegate.buildStatement(cached);
} }
@Override
public Stream<R> transform(ResultSet resultSet) {
return delegate.transform(resultSet).map(fn);
}
} }
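The transforming wrapper above applies the mapping function to each element of the delegate's stream. A brief sketch, assuming sync() on the stream-based operations returns a Stream and that name() is an accessor on a hypothetical entity:

import java.util.stream.Stream;

import net.helenus.core.operation.SelectOperation;

class TransformSketch {
	interface Person {
		String name();
	}

	// map(fn) hands back a SelectTransformingOperation whose transform() is
	// delegate.transform(resultSet).map(fn).
	static Stream<String> namesOnly(SelectOperation<Person> select) {
		return select.map(Person::name).sync();
	}
}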
@@ -15,13 +15,15 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.Assignment; import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update; import com.datastax.driver.core.querybuilder.Update;
import java.util.*;
import java.util.function.Function;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
@@ -32,565 +34,549 @@ import net.helenus.support.Immutables;
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> { public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
private HelenusEntity entity = null; private final List<Assignment> assignments = new ArrayList<Assignment>();
private final AbstractEntityDraft<E> draft;
private final Map<String, Object> draftMap;
private HelenusEntity entity = null;
private int[] ttl;
private long[] timestamp;
private final List<Assignment> assignments = new ArrayList<Assignment>(); public UpdateOperation(AbstractSessionOperations sessionOperations) {
private final AbstractEntityDraft<E> draft; super(sessionOperations);
private final Map<String, Object> draftMap; this.draft = null;
this.draftMap = null;
}
private int[] ttl; public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
private long[] timestamp; super(sessionOperations);
this.draft = draft;
this.draftMap = draft.toMap();
}
public UpdateOperation(AbstractSessionOperations sessionOperations) { public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
super(sessionOperations); super(sessionOperations);
this.draft = null; this.draft = null;
this.draftMap = null; this.draftMap = null;
}
public UpdateOperation( Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) { assignments.add(QueryBuilder.set(p.getColumnName(), value));
super(sessionOperations);
this.draft = draft;
this.draftMap = draft.toMap();
}
public UpdateOperation( addPropertyNode(p);
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) { }
super(sessionOperations);
this.draft = null;
this.draftMap = null;
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty()); public <V> UpdateOperation<E> set(Getter<V> getter, V v) {
assignments.add(QueryBuilder.set(p.getColumnName(), value)); Objects.requireNonNull(getter, "getter is empty");
addPropertyNode(p); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
}
public <V> UpdateOperation<E> set(Getter<V> getter, V v) { Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
Objects.requireNonNull(getter, "getter is empty"); assignments.add(QueryBuilder.set(p.getColumnName(), value));
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter); addPropertyNode(p);
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty()); return this;
assignments.add(QueryBuilder.set(p.getColumnName(), value)); }
addPropertyNode(p); /*
*
*
* COUNTER
*
*
*/
return this; public <V> UpdateOperation<E> increment(Getter<V> counterGetter) {
} return increment(counterGetter, 1L);
}
/* public <V> UpdateOperation<E> increment(Getter<V> counterGetter, long delta) {
*
*
* COUNTER
*
*
*/
public <V> UpdateOperation<E> increment(Getter<V> counterGetter) { Objects.requireNonNull(counterGetter, "counterGetter is empty");
return increment(counterGetter, 1L);
}
public <V> UpdateOperation<E> increment(Getter<V> counterGetter, long delta) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
Objects.requireNonNull(counterGetter, "counterGetter is empty"); assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter); addPropertyNode(p);
assignments.add(QueryBuilder.incr(p.getColumnName(), delta)); if (draft != null) {
String key = p.getProperty().getPropertyName();
draftMap.put(key, (Long) draftMap.get(key) + delta);
}
addPropertyNode(p); return this;
}
if (draft != null) { public <V> UpdateOperation<E> decrement(Getter<V> counterGetter) {
String key = p.getProperty().getPropertyName(); return decrement(counterGetter, 1L);
draftMap.put(key, (Long) draftMap.get(key) + delta); }
}
return this; public <V> UpdateOperation<E> decrement(Getter<V> counterGetter, long delta) {
}
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter) { Objects.requireNonNull(counterGetter, "counterGetter is empty");
return decrement(counterGetter, 1L);
}
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter, long delta) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
Objects.requireNonNull(counterGetter, "counterGetter is empty"); assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter); addPropertyNode(p);
assignments.add(QueryBuilder.decr(p.getColumnName(), delta)); if (draft != null) {
String key = p.getProperty().getPropertyName();
draftMap.put(key, (Long) draftMap.get(key) - delta);
}
addPropertyNode(p); return this;
}
if (draft != null) { /*
String key = p.getProperty().getPropertyName(); *
draftMap.put(key, (Long) draftMap.get(key) - delta); *
} * LIST
*
*/
return this; public <V> UpdateOperation<E> prepend(Getter<List<V>> listGetter, V value) {
}
/* Objects.requireNonNull(listGetter, "listGetter is empty");
* Objects.requireNonNull(value, "value is empty");
*
* LIST
*
*/
public <V> UpdateOperation<E> prepend(Getter<List<V>> listGetter, V value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj)); if (draft != null) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.add(0, value);
}
addPropertyNode(p); return this;
}
if (draft != null) { public <V> UpdateOperation<E> prependAll(Getter<List<V>> listGetter, List<V> value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.add(0, value);
}
return this; Objects.requireNonNull(listGetter, "listGetter is empty");
} Objects.requireNonNull(value, "value is empty");
public <V> UpdateOperation<E> prependAll(Getter<List<V>> listGetter, List<V> value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
List valueObj = prepareListValue(p, value);
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj)); if (draft != null && value.size() > 0) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.addAll(0, value);
}
addPropertyNode(p); return this;
}
if (draft != null && value.size() > 0) { public <V> UpdateOperation<E> setIdx(Getter<List<V>> listGetter, int idx, V value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.addAll(0, value);
}
return this; Objects.requireNonNull(listGetter, "listGetter is empty");
} Objects.requireNonNull(value, "value is empty");
public <V> UpdateOperation<E> setIdx(Getter<List<V>> listGetter, int idx, V value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj)); if (draft != null) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
if (idx < 0) {
list.add(0, value);
} else if (idx > list.size()) {
list.add(list.size(), value);
} else {
list.add(idx, value);
}
list.add(0, value);
}
addPropertyNode(p); return this;
}
if (draft != null) { public <V> UpdateOperation<E> append(Getter<List<V>> listGetter, V value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
if (idx < 0) {
list.add(0, value);
} else if (idx > list.size()) {
list.add(list.size(), value);
} else {
list.add(idx, value);
}
list.add(0, value);
}
return this; Objects.requireNonNull(listGetter, "listGetter is empty");
} Objects.requireNonNull(value, "value is empty");
public <V> UpdateOperation<E> append(Getter<List<V>> listGetter, V value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj)); if (draft != null) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.add(value);
}
addPropertyNode(p); return this;
}
if (draft != null) { public <V> UpdateOperation<E> appendAll(Getter<List<V>> listGetter, List<V> value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.add(value);
}
return this; Objects.requireNonNull(listGetter, "listGetter is empty");
} Objects.requireNonNull(value, "value is empty");
public <V> UpdateOperation<E> appendAll(Getter<List<V>> listGetter, List<V> value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
List valueObj = prepareListValue(p, value);
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj)); if (draft != null && value.size() > 0) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.addAll(value);
}
addPropertyNode(p); return this;
}
if (draft != null && value.size() > 0) { public <V> UpdateOperation<E> discard(Getter<List<V>> listGetter, V value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.addAll(value);
}
return this; Objects.requireNonNull(listGetter, "listGetter is empty");
} Objects.requireNonNull(value, "value is empty");
public <V> UpdateOperation<E> discard(Getter<List<V>> listGetter, V value) { HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
Objects.requireNonNull(listGetter, "listGetter is empty"); assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
Objects.requireNonNull(value, "value is empty");
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); addPropertyNode(p);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj)); if (draft != null) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.remove(value);
}
addPropertyNode(p); return this;
}
if (draft != null) { public <V> UpdateOperation<E> discardAll(Getter<List<V>> listGetter, List<V> value) {
String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key);
list.remove(value);
}
  public <V> UpdateOperation<E> discardAll(Getter<List<V>> listGetter, List<V> value) {
    Objects.requireNonNull(listGetter, "listGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
    List valueObj = prepareListValue(p, value);
    assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      List<V> list = (List<V>) draftMap.get(key);
      list.removeAll(value);
    }

    return this;
  }

  private Object prepareSingleListValue(HelenusPropertyNode p, Object value) {
    HelenusProperty prop = p.getProperty();
    Object valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      List convertedList = (List) converter.get().apply(Immutables.listOf(value));
      valueObj = convertedList.get(0);
    }

    return valueObj;
  }

  private List prepareListValue(HelenusPropertyNode p, List value) {
    HelenusProperty prop = p.getProperty();
    List valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (List) converter.get().apply(value);
    }

    return valueObj;
  }

  /*
   *
   * SET
   *
   */

  public <V> UpdateOperation<E> add(Getter<Set<V>> setGetter, V value) {
    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);
    assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.add(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> addAll(Getter<Set<V>> setGetter, Set<V> value) {
    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);
    assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.addAll(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> remove(Getter<Set<V>> setGetter, V value) {
    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);
    assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.remove(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> removeAll(Getter<Set<V>> setGetter, Set<V> value) {
    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);
    assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.removeAll(value);
    }

    return this;
  }

  private Object prepareSingleSetValue(HelenusPropertyNode p, Object value) {
    HelenusProperty prop = p.getProperty();
    Object valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
      valueObj = convertedSet.iterator().next();
    }

    return valueObj;
  }

  private Set prepareSetValue(HelenusPropertyNode p, Set value) {
    HelenusProperty prop = p.getProperty();
    Set valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (Set) converter.get().apply(value);
    }

    return valueObj;
  }

  /*
   *
   * MAP
   *
   */

  public <K, V> UpdateOperation<E> put(Getter<Map<K, V>> mapGetter, K key, V value) {
    Objects.requireNonNull(mapGetter, "mapGetter is empty");
    Objects.requireNonNull(key, "key is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
      for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
        assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
      }
    } else {
      assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
    }

    return this;
  }

  public <K, V> UpdateOperation<E> putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {
    Objects.requireNonNull(mapGetter, "mapGetter is empty");
    Objects.requireNonNull(map, "map is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map convertedMap = (Map) converter.get().apply(map);
      assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
    } else {
      assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
    }

    return this;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    if (entity == null) {
      throw new HelenusMappingException("empty update operation");
    }

    Update update = QueryBuilder.update(entity.getName().toCql());

    for (Assignment assignment : assignments) {
      update.with(assignment);
    }

    if (filters != null && !filters.isEmpty()) {
      for (Filter<?> filter : filters) {
        update.where(filter.getClause(sessionOps.getValuePreparer()));
      }
    }

    if (ifFilters != null && !ifFilters.isEmpty()) {
      for (Filter<?> filter : ifFilters) {
        update.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
      }
    }

    if (this.ttl != null) {
      update.using(QueryBuilder.ttl(this.ttl[0]));
    }

    if (this.timestamp != null) {
      update.using(QueryBuilder.timestamp(this.timestamp[0]));
    }

    return update;
  }

  @Override
  public E transform(ResultSet resultSet) {
    if (draft != null) {
      return Helenus.map(draft.getEntityClass(), draft.toMap(draftMap));
    } else {
      return (E) resultSet;
    }
  }

  public UpdateOperation<E> usingTtl(int ttl) {
    this.ttl = new int[1];
    this.ttl[0] = ttl;
    return this;
  }

  public UpdateOperation<E> usingTimestamp(long timestamp) {
    this.timestamp = new long[1];
    this.timestamp[0] = timestamp;
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can update columns only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
    }
  }

  @Override
  public E sync(UnitOfWork uow) { // throws TimeoutException
    if (uow == null) {
      return sync();
    }
    E result = super.sync(uow);
    // TODO(gburd): Only drafted entity objects are updated in the cache at this time.
    if (draft != null) {
      updateCache(uow, result, getFacets());
    }
    return result;
  }
}
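The collection mutators above all funnel into DataStax 3.x QueryBuilder assignments, so the generated CQL can be previewed with the driver alone. A minimal sketch, written inside any method; the table and column names ("timeline", "tags", "props", "id") are invented for illustration, only the QueryBuilder calls come from the code above:

    import com.datastax.driver.core.querybuilder.QueryBuilder;
    import com.datastax.driver.core.querybuilder.Update;

    Update update = QueryBuilder.update("timeline");
    update.with(QueryBuilder.add("tags", "breaking"));        // set column: tags = tags + {'breaking'}
    update.with(QueryBuilder.put("props", "source", "rss"));  // map column: props['source'] = 'rss'
    update.where(QueryBuilder.eq("id", 42));
    update.using(QueryBuilder.ttl(86400));                    // what usingTtl(86400) attaches
    update.using(QueryBuilder.timestamp(1508787000000L));     // what usingTimestamp(...) attaches
    // update.toString() renders roughly:
    // UPDATE timeline USING TTL 86400 AND TIMESTAMP 1508787000000 SET tags=tags+{'breaking'},props['source']='rss' WHERE id=42;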

@@ -19,41 +19,34 @@ import java.util.HashMap;
import java.util.Map;

public enum DefaultPrimitiveTypes {

  BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0),
  SHORT(short.class, (short) 0), INT(int.class, 0), LONG(long.class, 0L),
  FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);

  private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();

  static {
    for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
      map.put(type.getPrimitiveClass(), type);
    }
  }

  private final Class<?> primitiveClass;
  private final Object defaultValue;

  private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
    this.primitiveClass = primitiveClass;
    this.defaultValue = defaultValue;
  }

  public static DefaultPrimitiveTypes lookup(Class<?> primitiveClass) {
    return map.get(primitiveClass);
  }

  public Class<?> getPrimitiveClass() {
    return primitiveClass;
  }

  public Object getDefaultValue() {
    return defaultValue;
  }
}
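A quick illustration of how this lookup table is used (an example snippet, not code from the repository; statements belong inside any method):

    // Boxed defaults for primitive return types, used to avoid NPEs on auto-unboxing.
    Object i = DefaultPrimitiveTypes.lookup(int.class).getDefaultValue();      // Integer 0
    Object b = DefaultPrimitiveTypes.lookup(boolean.class).getDefaultValue();  // Boolean false
    Object l = DefaultPrimitiveTypes.lookup(long.class).getDefaultValue();     // Long 0L
    DefaultPrimitiveTypes none = DefaultPrimitiveTypes.lookup(String.class);   // null: not a primitive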

@@ -1,11 +1,25 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.reflect;

import java.util.Set;

public interface Drafted<T> extends MapExportable {

  Set<String> mutated();

  T build();
}
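Drafted is the small contract a draft object exposes: which keys were mutated, plus a build step back to the mapped entity. A hypothetical, simplified implementation for orientation only (the Account interface, field names, and builder method are invented; real drafts in this codebase are generated differently):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class AccountDraft implements Drafted<Account> {
      private final Map<String, Object> backing = new HashMap<>();
      private final Set<String> mutated = new HashSet<>();

      AccountDraft name(String name) { backing.put("name", name); mutated.add("name"); return this; }

      @Override public Set<String> mutated() { return mutated; }        // keys changed since the draft was created
      @Override public Map<String, Object> toMap() { return backing; }  // inherited from MapExportable
      @Override public Account build() { return Helenus.map(Account.class, backing); }
    }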

@@ -16,17 +16,18 @@
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;

import net.helenus.mapping.HelenusEntity;

public interface DslExportable {

  String GET_ENTITY_METHOD = "getHelenusMappingEntity";
  String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
  String SET_METADATA_METHOD = "setCassandraMetadataForHelenusSession";

  HelenusEntity getHelenusMappingEntity();

  HelenusPropertyNode getParentDslHelenusPropertyNode();

  void setCassandraMetadataForHelenusSession(Metadata metadata);
}

@@ -15,13 +15,15 @@
 */
package net.helenus.core.reflect;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.datastax.driver.core.*;

import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;
@@ -34,180 +36,162 @@ import net.helenus.support.HelenusException;

public class DslInvocationHandler<E> implements InvocationHandler {

  private final Class<E> iface;
  private final ClassLoader classLoader;
  private final Optional<HelenusPropertyNode> parent;
  private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
  private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
  private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
  private HelenusEntity entity = null;
  private Metadata metadata = null;

  public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
      Metadata metadata) {
    this.metadata = metadata;
    this.parent = parent;
    this.iface = iface;
    this.classLoader = classLoader;
  }

  public void setCassandraMetadataForHelenusSession(Metadata metadata) {
    if (metadata != null) {
      this.metadata = metadata;
      entity = init(metadata);
    }
  }

  private HelenusEntity init(Metadata metadata) {
    HelenusEntity entity = new HelenusMappingEntity(iface, metadata);

    for (HelenusProperty prop : entity.getOrderedProperties()) {
      map.put(prop.getGetterMethod(), prop);

      AbstractDataType type = prop.getDataType();
      Class<?> javaType = prop.getJavaType();

      if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
        Object childDsl = Helenus.dsl(javaType, classLoader,
            Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
        udtMap.put(prop.getGetterMethod(), childDsl);
      }

      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;
        if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
          Object childDsl = Helenus.dsl(javaType, classLoader,
              Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
          tupleMap.put(prop.getGetterMethod(), childDsl);
        }
      }
    }

    return entity;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

    HelenusEntity entity = this.entity;
    String methodName = method.getName();

    if ("equals".equals(methodName) && method.getParameterCount() == 1) {
      Object otherObj = args[0];
      if (otherObj == null) {
        return false;
      }
      if (Proxy.isProxyClass(otherObj.getClass())) {
        return this == Proxy.getInvocationHandler(otherObj);
      }
      return false;
    }

    if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
      if (metadata == null) {
        this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
      }
      return null;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

    if ("hashCode".equals(methodName)) {
      return hashCode();
    }

    if (DslExportable.GET_PARENT_METHOD.equals(methodName)) {
      return parent.get();
    }

    if (entity == null) {
      entity = init(metadata);
    }

    if ("toString".equals(methodName)) {
      return entity.toString();
    }

    if (DslExportable.GET_ENTITY_METHOD.equals(methodName)) {
      return entity;
    }

    HelenusProperty prop = map.get(method);
    if (prop == null) {
      prop = entity.getProperty(methodName);
    }

    if (prop != null) {

      AbstractDataType type = prop.getDataType();

      if (type instanceof UDTDataType) {
        Object childDsl = udtMap.get(method);
        if (childDsl != null) {
          return childDsl;
        }
      }

      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;
        DataType dt = dataType.getDataType();

        switch (dt.getName()) {
          case TUPLE:
            Object childDsl = tupleMap.get(method);
            if (childDsl != null) {
              return childDsl;
            }
            break;

          case SET:
            return new SetDsl(new HelenusPropertyNode(prop, parent));

          case LIST:
            return new ListDsl(new HelenusPropertyNode(prop, parent));

          case MAP:
            return new MapDsl(new HelenusPropertyNode(prop, parent));

          default:
            break;
        }
      }

      throw new DslPropertyException(new HelenusPropertyNode(prop, parent));
    }

    throw new HelenusException("invalid method call " + method);
  }
}
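Callers presumably recover a property path from this proxy by invoking a getter and catching the DslPropertyException it throws for scalar columns; that appears to be what MappingUtil.resolveMappingProperty relies on. A rough sketch only: the Timeline interface and the accessor name on the exception are assumptions, not confirmed by this diff.

    Timeline dsl = Helenus.dsl(Timeline.class);   // proxy backed by DslInvocationHandler
    HelenusPropertyNode node;
    try {
      dsl.userId();                                // scalar getter: the handler throws instead of returning
      throw new IllegalStateException("expected DslPropertyException");
    } catch (DslPropertyException e) {
      node = e.getPropertyNode();                  // assumed accessor; yields the captured property chain
    }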

@@ -19,7 +19,9 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;

import javax.validation.ConstraintValidator;

import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;
@@ -27,79 +29,79 @@ import net.helenus.support.HelenusMappingException;

public final class HelenusNamedProperty implements HelenusProperty {

  private final String name;

  public HelenusNamedProperty(String name) {
    this.name = name;
  }

  @Override public HelenusEntity getEntity() { throw new HelenusMappingException("will never called"); }
  @Override public String getPropertyName() { return name; }
  @Override public Method getGetterMethod() { throw new HelenusMappingException("will never called"); }
  @Override public IdentityName getColumnName() { return IdentityName.of(name, false); }
  @Override public Optional<IdentityName> getIndexName() { return Optional.empty(); }
  @Override public boolean caseSensitiveIndex() { return false; }
  @Override public Class<?> getJavaType() { throw new HelenusMappingException("will never called"); }
  @Override public AbstractDataType getDataType() { throw new HelenusMappingException("will never called"); }
  @Override public ColumnType getColumnType() { return ColumnType.COLUMN; }
  @Override public int getOrdinal() { return 0; }
  @Override public OrderingDirection getOrdering() { return OrderingDirection.ASC; }
  @Override public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) { return Optional.empty(); }
  @Override public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) { return Optional.empty(); }
  @Override public ConstraintValidator<? extends Annotation, ?>[] getValidators() { return MappingUtil.EMPTY_VALIDATORS; }
}

@@ -17,89 +17,90 @@ package net.helenus.core.reflect;
import java.util.*;
import java.util.stream.Collectors;

import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;

public final class HelenusPropertyNode implements Iterable<HelenusProperty> {

  private final HelenusProperty prop;
  private final Optional<HelenusPropertyNode> next;

  public HelenusPropertyNode(HelenusProperty prop, Optional<HelenusPropertyNode> next) {
    this.prop = prop;
    this.next = next;
  }

  public String getColumnName() {
    if (next.isPresent()) {

      List<String> columnNames = new ArrayList<String>();
      for (HelenusProperty p : this) {
        columnNames.add(p.getColumnName().toCql(true));
      }
      Collections.reverse(columnNames);

      if (prop instanceof HelenusNamedProperty) {
        int size = columnNames.size();
        StringBuilder str = new StringBuilder();
        for (int i = 0; i != size - 1; ++i) {
          if (str.length() != 0) {
            str.append(".");
          }
          str.append(columnNames.get(i));
        }
        str.append("[").append(columnNames.get(size - 1)).append("]");
        return str.toString();
      } else {
        return columnNames.stream().collect(Collectors.joining("."));
      }
    } else {
      return prop.getColumnName().toCql();
    }
  }

  public HelenusEntity getEntity() {
    if (next.isPresent()) {
      HelenusProperty last = prop;
      for (HelenusProperty p : this) {
        last = p;
      }
      return last.getEntity();
    } else {
      return prop.getEntity();
    }
  }

  public HelenusProperty getProperty() {
    return prop;
  }

  public Optional<HelenusPropertyNode> getNext() {
    return next;
  }

  public Iterator<HelenusProperty> iterator() {
    return new PropertyNodeIterator(Optional.of(this));
  }

  private static class PropertyNodeIterator implements Iterator<HelenusProperty> {

    private Optional<HelenusPropertyNode> next;

    public PropertyNodeIterator(Optional<HelenusPropertyNode> next) {
      this.next = next;
    }

    @Override
    public boolean hasNext() {
      return next.isPresent();
    }

    @Override
    public HelenusProperty next() {
      HelenusPropertyNode node = next.get();
      next = node.next;
      return node.prop;
    }
  }
}
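To see what getColumnName() produces for a chained path, a small example built with the named-property placeholder from this package (statements belong inside any method; the rendered string in the comment is approximate, since exact quoting depends on IdentityName.toCql):

    import java.util.Optional;

    HelenusPropertyNode address = new HelenusPropertyNode(new HelenusNamedProperty("address"), Optional.empty());
    HelenusPropertyNode city = new HelenusPropertyNode(new HelenusNamedProperty("city"), Optional.of(address));
    // The chain is walked tail-first, reversed, and a trailing named segment is bracketed,
    // yielding something like: address[city]
    String cql = city.getColumnName();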

@@ -16,164 +16,165 @@
package net.helenus.core.reflect;

import java.util.*;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

public final class ListDsl<V> implements List<V> {

  private final HelenusPropertyNode parent;

  public ListDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override
  public V get(int index) {
    HelenusProperty prop = new HelenusNamedProperty(Integer.toString(index));
    throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
  }

  @Override public int size() { throwShouldNeverCall(); return 0; }
  @Override public boolean isEmpty() { throwShouldNeverCall(); return false; }
  @Override public boolean contains(Object o) { throwShouldNeverCall(); return false; }
  @Override public Iterator<V> iterator() { throwShouldNeverCall(); return null; }
  @Override public Object[] toArray() { throwShouldNeverCall(); return null; }
  @Override public <T> T[] toArray(T[] a) { throwShouldNeverCall(); return null; }
  @Override public boolean add(V e) { throwShouldNeverCall(); return false; }
  @Override public boolean remove(Object o) { throwShouldNeverCall(); return false; }
  @Override public boolean containsAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public boolean addAll(Collection<? extends V> c) { throwShouldNeverCall(); return false; }
  @Override public boolean addAll(int index, Collection<? extends V> c) { throwShouldNeverCall(); return false; }
  @Override public boolean removeAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public boolean retainAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public void clear() { throwShouldNeverCall(); }
  @Override public V set(int index, V element) { throwShouldNeverCall(); return null; }
  @Override public void add(int index, V element) { throwShouldNeverCall(); }
  @Override public V remove(int index) { throwShouldNeverCall(); return null; }
  @Override public int indexOf(Object o) { throwShouldNeverCall(); return 0; }
  @Override public int lastIndexOf(Object o) { throwShouldNeverCall(); return 0; }
  @Override public ListIterator<V> listIterator() { throwShouldNeverCall(); return null; }
  @Override public ListIterator<V> listIterator(int index) { throwShouldNeverCall(); return null; }
  @Override public List<V> subList(int fromIndex, int toIndex) { throwShouldNeverCall(); return null; }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "ListDsl";
  }
}

@@ -19,98 +19,99 @@ import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

public final class MapDsl<K, V> implements Map<K, V> {

  private final HelenusPropertyNode parent;

  public MapDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override
  public V get(Object key) {
    HelenusProperty prop = new HelenusNamedProperty(key.toString());
    throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
  }

  @Override public int size() { throwShouldNeverCall(); return 0; }
  @Override public boolean isEmpty() { throwShouldNeverCall(); return false; }
  @Override public boolean containsKey(Object key) { throwShouldNeverCall(); return false; }
  @Override public boolean containsValue(Object value) { throwShouldNeverCall(); return false; }
  @Override public V put(K key, V value) { throwShouldNeverCall(); return null; }
  @Override public V remove(Object key) { throwShouldNeverCall(); return null; }
  @Override public void putAll(Map<? extends K, ? extends V> m) { throwShouldNeverCall(); }
  @Override public void clear() { throwShouldNeverCall(); }
  @Override public Set<K> keySet() { throwShouldNeverCall(); return null; }
  @Override public Collection<V> values() { throwShouldNeverCall(); return null; }
  @Override public Set<java.util.Map.Entry<K, V>> entrySet() { throwShouldNeverCall(); return null; }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "MapDsl";
  }
}

@@ -19,7 +19,7 @@ import java.util.Map;

public interface MapExportable {

  public static final String TO_MAP_METHOD = "toMap";

  Map<String, Object> toMap();
}

@@ -23,106 +23,113 @@ import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.Map;

import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient;
import net.helenus.support.HelenusException;

public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
  private static final long serialVersionUID = -7044209982830584984L;

  private final Map<String, Object> src;
  private final Class<E> iface;

  public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
    this.src = src;
    this.iface = iface;
  }

  private Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable {
    // NOTE: This is reflection magic to invoke (non-recursively) a default method
    // implemented on an interface that we've proxied (in ReflectionDslInstantiator).
    // I found the answer in this article:
    // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/

    // First, we need an instance of a private inner-class found in MethodHandles.
    Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
        int.class);
    constructor.setAccessible(true);

    // Now we need to look up and specially invoke the default method on the interface class.
    final Class<?> declaringClass = method.getDeclaringClass();
    Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
        .unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
    return result;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

    // Transient, default methods should simply be invoked as-is.
    if (method.isDefault() && method.getDeclaredAnnotation(Transient.class) != null) {
      return invokeDefault(proxy, method, args);
    }

    String methodName = method.getName();

    if ("equals".equals(methodName) && method.getParameterCount() == 1) {
      Object otherObj = args[0];
      if (otherObj == null) {
        return false;
      }
      if (Proxy.isProxyClass(otherObj.getClass())) {
        if (this == Proxy.getInvocationHandler(otherObj)) {
          return true;
        }
      }
      if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
        return true;
      }
      return false;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

    if ("hashCode".equals(methodName)) {
      return hashCode();
    }

    if ("toString".equals(methodName)) {
      return iface.getSimpleName() + ": " + src.toString();
    }

    if ("dsl".equals(methodName)) {
      return Helenus.dsl(iface);
    }

    if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
      return Collections.unmodifiableMap(src);
    }

    Object value = src.get(methodName);
    Class<?> returnType = method.getReturnType();

    if (value == null) {

      // Default implementations of non-Transient methods in entities are the default
      // value when the map contains 'null'.
      if (method.isDefault()) {
        return invokeDefault(proxy, method, args);
      }

      // Otherwise, if the return type of the method is a primitive Java type then
      // we'll return the standard default values to avoid a NPE in user code.
      if (returnType.isPrimitive()) {
        DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
        if (type == null) {
          throw new HelenusException("unknown primitive type " + returnType);
        }
        return type.getDefaultValue();
      }
    }

    return value;
  }
}
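One consequence of the null handling above that is easy to miss: a mapped entity backed by a sparse map returns primitive defaults instead of throwing. A hedged example; the Account interface is hypothetical, only Helenus.map and DefaultPrimitiveTypes come from the code above.

    import java.util.HashMap;
    import java.util.Map;

    interface Account {
      String name();
      int balance();   // primitive return type
    }

    // inside a test method:
    Map<String, Object> src = new HashMap<>();
    src.put("name", "alice");                    // note: no "balance" key at all
    Account account = Helenus.map(Account.class, src);
    account.name();     // "alice"
    account.balance();  // 0, via DefaultPrimitiveTypes.lookup(int.class), rather than an NPE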

@@ -15,25 +15,22 @@
 */
package net.helenus.core.reflect;

import java.lang.reflect.Proxy;
import java.util.Optional;

import com.datastax.driver.core.Metadata;

import net.helenus.core.DslInstantiator;

public enum ReflectionDslInstantiator implements DslInstantiator {
  INSTANCE;

  @Override
  @SuppressWarnings("unchecked")
  public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
      Metadata metadata) {
    DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
    return proxy;
  }
}

@@ -19,14 +19,15 @@ import net.helenus.support.HelenusMappingException;

public final class ReflectionInstantiator {

  private ReflectionInstantiator() {
  }

  public static <T> T instantiateClass(Class<T> clazz) {
    try {
      return clazz.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
      throw new HelenusMappingException("invalid class " + clazz, e);
    }
  }
}

@@ -17,18 +17,18 @@ package net.helenus.core.reflect;
import java.lang.reflect.Proxy;
import java.util.Map;

import net.helenus.core.MapperInstantiator;

public enum ReflectionMapperInstantiator implements MapperInstantiator {
  INSTANCE;

  @Override
  @SuppressWarnings("unchecked")
  public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
    MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
    return proxy;
  }
}

@@ -18,103 +18,104 @@ package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

import net.helenus.support.HelenusMappingException;

public final class SetDsl<V> implements Set<V> {

  private final HelenusPropertyNode parent;

  public SetDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override public int size() { throwShouldNeverCall(); return 0; }
  @Override public boolean isEmpty() { throwShouldNeverCall(); return false; }
  @Override public boolean contains(Object o) { throwShouldNeverCall(); return false; }
  @Override public Iterator<V> iterator() { throwShouldNeverCall(); return null; }
  @Override public Object[] toArray() { throwShouldNeverCall(); return null; }
  @Override public <T> T[] toArray(T[] a) { throwShouldNeverCall(); return null; }
  @Override public boolean add(V e) { throwShouldNeverCall(); return false; }
  @Override public boolean remove(Object o) { throwShouldNeverCall(); return false; }
  @Override public boolean containsAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public boolean addAll(Collection<? extends V> c) { throwShouldNeverCall(); return false; }
  @Override public boolean retainAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public boolean removeAll(Collection<?> c) { throwShouldNeverCall(); return false; }
  @Override public void clear() { throwShouldNeverCall(); }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "SetDsl";
  }
}

View file

@ -16,6 +16,7 @@
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import net.helenus.mapping.annotation.ClusteringColumn; import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column; import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey; import net.helenus.mapping.annotation.PartitionKey;
@ -24,99 +25,91 @@ import net.helenus.support.HelenusMappingException;
public final class ColumnInformation { public final class ColumnInformation {
private final IdentityName columnName; private final IdentityName columnName;
private final ColumnType columnType; private final ColumnType columnType;
private final int ordinal; private final int ordinal;
private final OrderingDirection ordering; private final OrderingDirection ordering;
public ColumnInformation(Method getter) { public ColumnInformation(Method getter) {
String columnName = null; String columnName = null;
boolean forceQuote = false; boolean forceQuote = false;
ColumnType columnTypeLocal = ColumnType.COLUMN; ColumnType columnTypeLocal = ColumnType.COLUMN;
int ordinalLocal = 0; int ordinalLocal = 0;
OrderingDirection orderingLocal = OrderingDirection.ASC; OrderingDirection orderingLocal = OrderingDirection.ASC;
PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class); PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class);
if (partitionKey != null) { if (partitionKey != null) {
columnName = partitionKey.value(); columnName = partitionKey.value();
forceQuote = partitionKey.forceQuote(); forceQuote = partitionKey.forceQuote();
columnTypeLocal = ColumnType.PARTITION_KEY; columnTypeLocal = ColumnType.PARTITION_KEY;
ordinalLocal = partitionKey.ordinal(); ordinalLocal = partitionKey.ordinal();
} }
ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class); ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class);
if (clusteringColumn != null) { if (clusteringColumn != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = clusteringColumn.value(); columnName = clusteringColumn.value();
forceQuote = clusteringColumn.forceQuote(); forceQuote = clusteringColumn.forceQuote();
columnTypeLocal = ColumnType.CLUSTERING_COLUMN; columnTypeLocal = ColumnType.CLUSTERING_COLUMN;
ordinalLocal = clusteringColumn.ordinal(); ordinalLocal = clusteringColumn.ordinal();
orderingLocal = clusteringColumn.ordering(); orderingLocal = clusteringColumn.ordering();
} }
StaticColumn staticColumn = getter.getDeclaredAnnotation(StaticColumn.class); StaticColumn staticColumn = getter.getDeclaredAnnotation(StaticColumn.class);
if (staticColumn != null) { if (staticColumn != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = staticColumn.value(); columnName = staticColumn.value();
forceQuote = staticColumn.forceQuote(); forceQuote = staticColumn.forceQuote();
columnTypeLocal = ColumnType.STATIC_COLUMN; columnTypeLocal = ColumnType.STATIC_COLUMN;
ordinalLocal = staticColumn.ordinal(); ordinalLocal = staticColumn.ordinal();
} }
Column column = getter.getDeclaredAnnotation(Column.class); Column column = getter.getDeclaredAnnotation(Column.class);
if (column != null) { if (column != null) {
ensureSingleColumnType(columnTypeLocal, getter); ensureSingleColumnType(columnTypeLocal, getter);
columnName = column.value(); columnName = column.value();
forceQuote = column.forceQuote(); forceQuote = column.forceQuote();
columnTypeLocal = ColumnType.COLUMN; columnTypeLocal = ColumnType.COLUMN;
ordinalLocal = column.ordinal(); ordinalLocal = column.ordinal();
} }
if (columnName == null || columnName.isEmpty()) { if (columnName == null || columnName.isEmpty()) {
columnName = MappingUtil.getDefaultColumnName(getter); columnName = MappingUtil.getDefaultColumnName(getter);
} }
this.columnName = new IdentityName(columnName, forceQuote); this.columnName = new IdentityName(columnName, forceQuote);
this.columnType = columnTypeLocal; this.columnType = columnTypeLocal;
this.ordinal = ordinalLocal; this.ordinal = ordinalLocal;
this.ordering = orderingLocal; this.ordering = orderingLocal;
} }
public IdentityName getColumnName() { public IdentityName getColumnName() {
return columnName; return columnName;
} }
public ColumnType getColumnType() { public ColumnType getColumnType() {
return columnType; return columnType;
} }
public int getOrdinal() { public int getOrdinal() {
return ordinal; return ordinal;
} }
public OrderingDirection getOrdering() { public OrderingDirection getOrdering() {
return ordering; return ordering;
} }
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) { private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
if (columnTypeLocal != ColumnType.COLUMN) { if (columnTypeLocal != ColumnType.COLUMN) {
throw new HelenusMappingException( throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
"property can be annotated only by a single column type " + getter); }
} }
}
@Override @Override
public String toString() { public String toString() {
return "ColumnInformation [columnName=" return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
+ columnName + ", ordering=" + ordering + "]";
+ ", columnType=" }
+ columnType
+ ", ordinal="
+ ordinal
+ ", ordering="
+ ordering
+ "]";
}
} }
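
A quick usage sketch of how ColumnInformation resolves the metadata above from an annotated getter. The Account interface and its column names are hypothetical and only illustrate the annotation precedence in the constructor; explicit values are used so the example does not depend on the default name converter.

import java.lang.reflect.Method;
import java.util.Date;
import net.helenus.mapping.ColumnInformation;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;

public class ColumnInformationSketch {
  // Hypothetical mapping interface used only for this example.
  interface Account {
    @PartitionKey(value = "account_id", ordinal = 0)
    String accountId();

    @ClusteringColumn(value = "created_at", ordinal = 0)
    Date createdAt();
  }

  public static void main(String[] args) throws Exception {
    Method getter = Account.class.getMethod("accountId");
    ColumnInformation info = new ColumnInformation(getter);
    // @PartitionKey wins for this getter, so:
    System.out.println(info.getColumnType());          // PARTITION_KEY
    System.out.println(info.getOrdinal());             // 0
    System.out.println(info.getColumnName().toCql());  // account_id
  }
}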

View file

@ -16,8 +16,5 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum ColumnType { public enum ColumnType {
PARTITION_KEY, PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;
CLUSTERING_COLUMN,
STATIC_COLUMN,
COLUMN;
} }

View file

@ -16,18 +16,23 @@
package net.helenus.mapping; package net.helenus.mapping;
import java.util.Collection; import java.util.Collection;
import java.util.List;
import net.helenus.core.cache.Facet;
public interface HelenusEntity { public interface HelenusEntity {
HelenusEntityType getType(); HelenusEntityType getType();
boolean isCacheable(); boolean isCacheable();
Class<?> getMappingInterface(); Class<?> getMappingInterface();
IdentityName getName(); IdentityName getName();
Collection<HelenusProperty> getOrderedProperties(); Collection<HelenusProperty> getOrderedProperties();
HelenusProperty getProperty(String name); HelenusProperty getProperty(String name);
List<Facet> getFacets();
} }

View file

@ -16,8 +16,5 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum HelenusEntityType { public enum HelenusEntityType {
TABLE, TABLE, VIEW, TUPLE, UDT;
VIEW,
TUPLE,
UDT;
} }

View file

@ -15,275 +15,293 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import com.datastax.driver.core.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.*; import java.util.*;
import org.apache.commons.lang3.ClassUtils;
import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import net.helenus.config.HelenusSettings; import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable; import net.helenus.core.annotation.Cacheable;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*; import net.helenus.mapping.annotation.*;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
import org.apache.commons.lang3.ClassUtils;
public final class HelenusMappingEntity implements HelenusEntity { public final class HelenusMappingEntity implements HelenusEntity {
private final Class<?> iface; private final Class<?> iface;
private final HelenusEntityType type; private final HelenusEntityType type;
private final IdentityName name; private final IdentityName name;
private final boolean cacheable; private final boolean cacheable;
private final ImmutableMap<String, Method> methods; private final ImmutableMap<String, Method> methods;
private final ImmutableMap<String, HelenusProperty> props; private final ImmutableMap<String, HelenusProperty> props;
private final ImmutableList<HelenusProperty> orderedProps; private final ImmutableList<HelenusProperty> orderedProps;
private final List<Facet> facets;
public HelenusMappingEntity(Class<?> iface, Metadata metadata) { public HelenusMappingEntity(Class<?> iface, Metadata metadata) {
this(iface, autoDetectType(iface), metadata); this(iface, autoDetectType(iface), metadata);
} }
public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) { public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) {
if (iface == null || !iface.isInterface()) { if (iface == null || !iface.isInterface()) {
throw new IllegalArgumentException("invalid parameter " + iface); throw new IllegalArgumentException("invalid parameter " + iface);
} }
this.iface = iface; this.iface = iface;
this.type = Objects.requireNonNull(type, "type is empty"); this.type = Objects.requireNonNull(type, "type is empty");
this.name = resolveName(iface, type); this.name = resolveName(iface, type);
HelenusSettings settings = Helenus.settings(); HelenusSettings settings = Helenus.settings();
Map<String, Method> methods = new HashMap<String, Method>(); Map<String, Method> methods = new HashMap<String, Method>();
for (Method m : iface.getDeclaredMethods()) { for (Method m : iface.getDeclaredMethods()) {
methods.put(m.getName(), m); methods.put(m.getName(), m);
} }
for (Class<?> c : ClassUtils.getAllInterfaces(iface)) { for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
if (c.getDeclaredAnnotation(Table.class) != null if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) {
|| c.getDeclaredAnnotation(InheritedTable.class) != null) { for (Method m : c.getDeclaredMethods()) {
for (Method m : c.getDeclaredMethods()) { Method o = methods.get(m.getName());
Method o = methods.get(m.getName()); if (o != null) {
if (o != null) { // Prefer overridden method implementation.
// Prefer overridden method implementation. if (o.getDeclaringClass().isAssignableFrom(m.getDeclaringClass())) {
if (o.getDeclaringClass().isAssignableFrom(m.getDeclaringClass())) { methods.put(m.getName(), m);
methods.put(m.getName(), m); }
} } else {
} else { methods.put(m.getName(), m);
methods.put(m.getName(), m); }
} }
} }
} }
}
List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>(); List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>();
ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder(); ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder();
ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder(); ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder();
for (Method method : methods.values()) { for (Method method : methods.values()) {
if (settings.getGetterMethodDetector().apply(method)) { if (settings.getGetterMethodDetector().apply(method)) {
methodsBuilder.put(method.getName(), method); methodsBuilder.put(method.getName(), method);
if (metadata != null) { if (metadata != null) {
HelenusProperty prop = new HelenusMappingProperty(this, method, metadata); HelenusProperty prop = new HelenusMappingProperty(this, method, metadata);
propsBuilder.put(prop.getPropertyName(), prop); propsBuilder.put(prop.getPropertyName(), prop);
propsLocal.add(prop); propsLocal.add(prop);
} }
} }
} }
this.methods = methodsBuilder.build(); this.methods = methodsBuilder.build();
this.props = propsBuilder.build(); this.props = propsBuilder.build();
Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE); Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE);
this.orderedProps = ImmutableList.copyOf(propsLocal); this.orderedProps = ImmutableList.copyOf(propsLocal);
validateOrdinals(); validateOrdinals();
cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class)); // Caching
} cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));
@Override List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
public HelenusEntityType getType() { ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
return type; facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
} for (HelenusProperty prop : orderedProps) {
switch (prop.getColumnType()) {
case PARTITION_KEY :
case CLUSTERING_COLUMN :
primaryKeyProperties.add(prop);
break;
default :
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
primaryKeyProperties = null;
}
Optional<IdentityName> optionalIndexName = prop.getIndexName();
if (optionalIndexName.isPresent()) {
UnboundFacet facet = new UnboundFacet(prop);
facetsBuilder.add(facet);
}
}
}
if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
}
this.facets = facetsBuilder.build();
}
@Override private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
public boolean isCacheable() {
return cacheable;
}
@Override switch (type) {
public Class<?> getMappingInterface() { case TABLE :
return iface; return MappingUtil.getTableName(iface, true);
}
@Override case VIEW :
public Collection<HelenusProperty> getOrderedProperties() { return MappingUtil.getViewName(iface, true);
return orderedProps;
}
@Override case TUPLE :
public HelenusProperty getProperty(String name) { return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
HelenusProperty property = props.get(name);
if (property == null && methods.containsKey(name)) {
property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
return property; //TODO(gburd): review adding these into the props map...
}
return props.get(name);
}
@Override case UDT :
public IdentityName getName() { return MappingUtil.getUserDefinedTypeName(iface, true);
return name; }
}
 private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) { throw new HelenusMappingException("invalid entity type " + type + " in " + iface);
}
switch (type) { private static HelenusEntityType autoDetectType(Class<?> iface) {
case TABLE:
return MappingUtil.getTableName(iface, true);
case VIEW: Objects.requireNonNull(iface, "empty iface");
return MappingUtil.getViewName(iface, true);
case TUPLE: if (null != iface.getDeclaredAnnotation(Table.class)) {
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false); return HelenusEntityType.TABLE;
} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
return HelenusEntityType.VIEW;
} else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE;
} else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT;
}
case UDT: throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
return MappingUtil.getUserDefinedTypeName(iface, true); }
}
throw new HelenusMappingException("invalid entity type " + type + " in " + type); @Override
} public HelenusEntityType getType() {
return type;
}
private static HelenusEntityType autoDetectType(Class<?> iface) { @Override
public boolean isCacheable() {
return cacheable;
}
Objects.requireNonNull(iface, "empty iface"); @Override
public Class<?> getMappingInterface() {
return iface;
}
if (null != iface.getDeclaredAnnotation(Table.class)) { @Override
return HelenusEntityType.TABLE; public Collection<HelenusProperty> getOrderedProperties() {
} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) { return orderedProps;
return HelenusEntityType.VIEW; }
} else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE;
} else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT;
}
throw new HelenusMappingException( @Override
"entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface); public HelenusProperty getProperty(String name) {
} HelenusProperty property = props.get(name);
if (property == null && methods.containsKey(name)) {
property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
return property; // TODO(gburd): review adding these into the props map...
}
return props.get(name);
}
private void validateOrdinals() { @Override
public List<Facet> getFacets() {
return facets;
}
switch (getType()) { @Override
case TABLE: public IdentityName getName() {
validateOrdinalsForTable(); return name;
break; }
case TUPLE: private void validateOrdinals() {
validateOrdinalsInTuple();
break;
default: switch (getType()) {
break; case TABLE :
} validateOrdinalsForTable();
} break;
private void validateOrdinalsForTable() { case TUPLE :
validateOrdinalsInTuple();
break;
BitSet partitionKeys = new BitSet(); default :
BitSet clusteringColumns = new BitSet(); break;
}
}
for (HelenusProperty prop : getOrderedProperties()) { private void validateOrdinalsForTable() {
ColumnType type = prop.getColumnType(); BitSet partitionKeys = new BitSet();
BitSet clusteringColumns = new BitSet();
int ordinal = prop.getOrdinal(); for (HelenusProperty prop : getOrderedProperties()) {
switch (type) { ColumnType type = prop.getColumnType();
case PARTITION_KEY:
if (partitionKeys.get(ordinal)) {
throw new HelenusMappingException(
"detected two or more partition key columns with the same ordinal "
+ ordinal
+ " in "
+ prop.getEntity());
}
partitionKeys.set(ordinal);
break;
case CLUSTERING_COLUMN: int ordinal = prop.getOrdinal();
if (clusteringColumns.get(ordinal)) {
throw new HelenusMappingException(
"detected two or clustering columns with the same ordinal "
+ ordinal
+ " in "
+ prop.getEntity());
}
clusteringColumns.set(ordinal);
break;
default: switch (type) {
break; case PARTITION_KEY :
} if (partitionKeys.get(ordinal)) {
} throw new HelenusMappingException(
} "detected two or more partition key columns with the same ordinal " + ordinal + " in "
+ prop.getEntity());
}
partitionKeys.set(ordinal);
break;
private void validateOrdinalsInTuple() { case CLUSTERING_COLUMN :
boolean[] ordinals = new boolean[props.size()]; if (clusteringColumns.get(ordinal)) {
throw new HelenusMappingException("detected two or clustering columns with the same ordinal "
+ ordinal + " in " + prop.getEntity());
}
clusteringColumns.set(ordinal);
break;
getOrderedProperties() default :
.forEach( break;
p -> { }
int ordinal = p.getOrdinal(); }
}
if (ordinal < 0 || ordinal >= ordinals.length) { private void validateOrdinalsInTuple() {
throw new HelenusMappingException( boolean[] ordinals = new boolean[props.size()];
"invalid ordinal "
+ ordinal
+ " found for property "
+ p.getPropertyName()
+ " in "
+ p.getEntity());
}
if (ordinals[ordinal]) { getOrderedProperties().forEach(p -> {
throw new HelenusMappingException( int ordinal = p.getOrdinal();
"detected two or more properties with the same ordinal "
+ ordinal
+ " in "
+ p.getEntity());
}
ordinals[ordinal] = true; if (ordinal < 0 || ordinal >= ordinals.length) {
}); throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
+ p.getPropertyName() + " in " + p.getEntity());
}
for (int i = 0; i != ordinals.length; ++i) { if (ordinals[ordinal]) {
if (!ordinals[i]) { throw new HelenusMappingException(
throw new HelenusMappingException("detected absent ordinal " + i + " in " + this); "detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
} }
}
}
@Override ordinals[ordinal] = true;
public String toString() { });
StringBuilder str = new StringBuilder(); for (int i = 0; i != ordinals.length; ++i) {
str.append(iface.getSimpleName()) if (!ordinals[i]) {
.append("(") throw new HelenusMappingException("detected absent ordinal " + i + " in " + this);
.append(name.getName()) }
.append(") ") }
.append(type.name().toLowerCase()) }
.append(":\n");
for (HelenusProperty prop : getOrderedProperties()) { @Override
str.append(prop.toString()); public String toString() {
str.append("\n");
} StringBuilder str = new StringBuilder();
return str.toString(); str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
} .append(type.name().toLowerCase()).append(":\n");
for (HelenusProperty prop : getOrderedProperties()) {
str.append(prop.toString());
str.append("\n");
}
return str.toString();
}
} }
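
The constructor above now derives the cache facets for an entity: a fixed "table" facet, one UnboundFacet spanning the primary-key properties, and one UnboundFacet per indexed regular column. A minimal sketch of what that means for a hypothetical entity; the Order interface below is illustrative only, and the resulting facet list is described in comments rather than built against a live session.

import java.math.BigDecimal;
import java.util.UUID;
import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Index;   // assumed to live with the other mapping annotations
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical entity used only to illustrate the facet list built above.
@Table("orders")
@Cacheable
interface Order {
  @PartitionKey String customerId();
  @ClusteringColumn UUID orderId();
  @Index @Column String status();
  @Column BigDecimal total();
}

// For this interface, getFacets() would contain, in order:
//   1. a Facet("table", "orders") marked as fixed,
//   2. an UnboundFacet over the primary-key properties (customerId, orderId),
//   3. an UnboundFacet over the indexed regular column (status).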

View file

@ -15,13 +15,16 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import com.datastax.driver.core.Metadata;
import java.lang.annotation.Annotation; import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Type; import java.lang.reflect.Type;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import com.datastax.driver.core.Metadata;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType; import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes; import net.helenus.mapping.javatype.MappingJavaTypes;
@ -29,174 +32,171 @@ import net.helenus.mapping.type.AbstractDataType;
public final class HelenusMappingProperty implements HelenusProperty { public final class HelenusMappingProperty implements HelenusProperty {
private final HelenusEntity entity; private final HelenusEntity entity;
private final Method getter; private final Method getter;
private final String propertyName; private final String propertyName;
private final Optional<IdentityName> indexName; private final Optional<IdentityName> indexName;
private final boolean caseSensitiveIndex; private final boolean caseSensitiveIndex;
private final ColumnInformation columnInfo; private final ColumnInformation columnInfo;
private final Type genericJavaType; private final Type genericJavaType;
private final Class<?> javaType; private final Class<?> javaType;
private final AbstractJavaType abstractJavaType; private final AbstractJavaType abstractJavaType;
private final AbstractDataType dataType; private final AbstractDataType dataType;
private final ConstraintValidator<? extends Annotation, ?>[] validators;
private volatile Optional<Function<Object, Object>> readConverter = null;
private volatile Optional<Function<Object, Object>> writeConverter = null;
private volatile Optional<Function<Object, Object>> readConverter = null; public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
private volatile Optional<Function<Object, Object>> writeConverter = null; this.entity = entity;
this.getter = getter;
private final ConstraintValidator<? extends Annotation, ?>[] validators; this.propertyName = MappingUtil.getPropertyName(getter);
this.indexName = MappingUtil.getIndexName(getter);
this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter);
public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) { this.columnInfo = new ColumnInformation(getter);
this.entity = entity;
this.getter = getter;
this.propertyName = MappingUtil.getPropertyName(getter); this.genericJavaType = getter.getGenericReturnType();
this.indexName = MappingUtil.getIndexName(getter); this.javaType = getter.getReturnType();
this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter); this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.columnInfo = new ColumnInformation(getter); this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
this.columnInfo.getColumnType(), metadata);
this.genericJavaType = getter.getGenericReturnType(); this.validators = MappingUtil.getValidators(getter);
this.javaType = getter.getReturnType(); }
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.dataType = @Override
abstractJavaType.resolveDataType( public HelenusEntity getEntity() {
this.getter, this.genericJavaType, this.columnInfo.getColumnType(), metadata); return entity;
}
this.validators = MappingUtil.getValidators(getter); @Override
} public Class<?> getJavaType() {
return (Class<?>) javaType;
}
@Override @Override
public HelenusEntity getEntity() { public AbstractDataType getDataType() {
return entity; return dataType;
} }
@Override @Override
public Class<?> getJavaType() { public ColumnType getColumnType() {
return (Class<?>) javaType; return columnInfo.getColumnType();
} }
@Override @Override
public AbstractDataType getDataType() { public int getOrdinal() {
return dataType; return columnInfo.getOrdinal();
} }
@Override @Override
public ColumnType getColumnType() { public OrderingDirection getOrdering() {
return columnInfo.getColumnType(); return columnInfo.getOrdering();
} }
@Override @Override
public int getOrdinal() { public IdentityName getColumnName() {
return columnInfo.getOrdinal(); return columnInfo.getColumnName();
} }
@Override @Override
public OrderingDirection getOrdering() { public Optional<IdentityName> getIndexName() {
return columnInfo.getOrdering(); return indexName;
} }
@Override @Override
public IdentityName getColumnName() { public boolean caseSensitiveIndex() {
return columnInfo.getColumnName(); return caseSensitiveIndex;
} }
@Override @Override
public Optional<IdentityName> getIndexName() { public String getPropertyName() {
return indexName; return propertyName;
} }
@Override @Override
public boolean caseSensitiveIndex() { public Method getGetterMethod() {
return caseSensitiveIndex; return getter;
} }
@Override @Override
public String getPropertyName() { public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
return propertyName;
}
@Override if (readConverter == null) {
public Method getGetterMethod() { readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository);
return getter; }
}
@Override return readConverter;
public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) { }
if (readConverter == null) { @Override
readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository); public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
}
return readConverter; if (writeConverter == null) {
} writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository);
}
@Override return writeConverter;
public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) { }
if (writeConverter == null) { @Override
writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository); public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
} return validators;
}
return writeConverter; @Override
} public String toString() {
@Override StringBuilder str = new StringBuilder();
public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
return validators;
}
@Override String columnName = this.getColumnName().getName();
public String toString() { str.append(" ");
str.append(this.getDataType());
str.append(" ");
str.append(this.getPropertyName());
str.append("(");
if (!columnName.equals(this.getPropertyName())) {
str.append(columnName);
}
str.append(") ");
StringBuilder str = new StringBuilder(); ColumnType type = this.getColumnType();
String columnName = this.getColumnName().getName(); switch (type) {
str.append(" "); case PARTITION_KEY :
str.append(this.getDataType()); str.append("partition_key[");
str.append(" "); str.append(this.getOrdinal());
str.append(this.getPropertyName()); str.append("] ");
str.append("("); break;
if (!columnName.equals(this.getPropertyName())) {
str.append(columnName);
}
str.append(") ");
ColumnType type = this.getColumnType(); case CLUSTERING_COLUMN :
str.append("clustering_column[");
str.append(this.getOrdinal());
str.append("] ");
OrderingDirection od = this.getOrdering();
if (od != null) {
str.append(od.name().toLowerCase()).append(" ");
}
break;
switch (type) { case STATIC_COLUMN :
case PARTITION_KEY: str.append("static ");
str.append("partition_key["); break;
str.append(this.getOrdinal());
str.append("] ");
break;
case CLUSTERING_COLUMN: case COLUMN :
str.append("clustering_column["); break;
str.append(this.getOrdinal()); }
str.append("] ");
OrderingDirection od = this.getOrdering();
if (od != null) {
str.append(od.name().toLowerCase()).append(" ");
}
break;
case STATIC_COLUMN: Optional<IdentityName> idx = this.getIndexName();
str.append("static "); if (idx.isPresent()) {
break; str.append("index(").append(idx.get().getName()).append(") ");
}
case COLUMN: return str.toString();
break; }
}
Optional<IdentityName> idx = this.getIndexName();
if (idx.isPresent()) {
str.append("index(").append(idx.get().getName()).append(") ");
}
return str.toString();
}
} }

View file

@ -19,37 +19,39 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType; import net.helenus.mapping.type.AbstractDataType;
public interface HelenusProperty { public interface HelenusProperty {
HelenusEntity getEntity(); HelenusEntity getEntity();
String getPropertyName(); String getPropertyName();
Method getGetterMethod(); Method getGetterMethod();
IdentityName getColumnName(); IdentityName getColumnName();
Optional<IdentityName> getIndexName(); Optional<IdentityName> getIndexName();
boolean caseSensitiveIndex(); boolean caseSensitiveIndex();
Class<?> getJavaType(); Class<?> getJavaType();
AbstractDataType getDataType(); AbstractDataType getDataType();
ColumnType getColumnType(); ColumnType getColumnType();
int getOrdinal(); int getOrdinal();
OrderingDirection getOrdering(); OrderingDirection getOrdering();
Optional<Function<Object, Object>> getReadConverter(SessionRepository repository); Optional<Function<Object, Object>> getReadConverter(SessionRepository repository);
Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository); Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository);
ConstraintValidator<? extends Annotation, ?>[] getValidators(); ConstraintValidator<? extends Annotation, ?>[] getValidators();
} }

View file

@ -19,41 +19,41 @@ import net.helenus.support.CqlUtil;
public final class IdentityName { public final class IdentityName {
private final String name; private final String name;
private final boolean forceQuote; private final boolean forceQuote;
public IdentityName(String name, boolean forceQuote) { public IdentityName(String name, boolean forceQuote) {
this.name = name.toLowerCase(); this.name = name.toLowerCase();
this.forceQuote = forceQuote; this.forceQuote = forceQuote;
} }
public static IdentityName of(String name, boolean forceQuote) { public static IdentityName of(String name, boolean forceQuote) {
return new IdentityName(name, forceQuote); return new IdentityName(name, forceQuote);
} }
public String getName() { public String getName() {
return name; return name;
} }
public boolean isForceQuote() { public boolean isForceQuote() {
return forceQuote; return forceQuote;
} }
public String toCql(boolean overrideForceQuote) { public String toCql(boolean overrideForceQuote) {
if (overrideForceQuote) { if (overrideForceQuote) {
return CqlUtil.forceQuote(name); return CqlUtil.forceQuote(name);
} else { } else {
return name; return name;
} }
} }
public String toCql() { public String toCql() {
return toCql(forceQuote); return toCql(forceQuote);
} }
@Override @Override
public String toString() { public String toString() {
return toCql(); return toCql();
} }
} }
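
A small usage sketch for IdentityName. The behaviour shown follows the code above: names are lower-cased in the constructor, and toCql() only quotes when forceQuote is set; the exact quoting is delegated to CqlUtil.forceQuote (assumed to wrap the name in double quotes).

import net.helenus.mapping.IdentityName;

public class IdentityNameSketch {
  public static void main(String[] args) {
    IdentityName plain = IdentityName.of("UserProfile", false);
    System.out.println(plain.toCql());     // userprofile  (lower-cased, unquoted)

    IdentityName reserved = IdentityName.of("select", true);
    System.out.println(reserved.toCql());  // quoted via CqlUtil.forceQuote, e.g. "select"
    System.out.println(plain.toCql(true)); // quoting can also be forced per call
  }
}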

View file

@ -20,8 +20,10 @@ import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import javax.validation.Constraint; import javax.validation.Constraint;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.reflect.*; import net.helenus.core.reflect.*;
@ -31,255 +33,253 @@ import net.helenus.support.HelenusMappingException;
public final class MappingUtil { public final class MappingUtil {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0];
new ConstraintValidator[0];
private MappingUtil() {} private MappingUtil() {
}
public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) { public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
List<ConstraintValidator<? extends Annotation, ?>> list = null; List<ConstraintValidator<? extends Annotation, ?>> list = null;
for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) { for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) {
list = addValidators(constraintAnnotation, list); list = addValidators(constraintAnnotation, list);
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType(); Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) { for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
list = addValidators(possibleConstraint, list); list = addValidators(possibleConstraint, list);
} }
} }
if (list == null) { if (list == null) {
return EMPTY_VALIDATORS; return EMPTY_VALIDATORS;
} else { } else {
return list.toArray(EMPTY_VALIDATORS); return list.toArray(EMPTY_VALIDATORS);
} }
} }
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators( private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation,
Annotation constraintAnnotation, List<ConstraintValidator<? extends Annotation, ?>> list) { List<ConstraintValidator<? extends Annotation, ?>> list) {
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType(); Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) { for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
if (possibleConstraint instanceof Constraint) { if (possibleConstraint instanceof Constraint) {
Constraint constraint = (Constraint) possibleConstraint; Constraint constraint = (Constraint) possibleConstraint;
for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) { for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {
ConstraintValidator<? extends Annotation, ?> validator = ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator
ReflectionInstantiator.instantiateClass(clazz); .instantiateClass(clazz);
((ConstraintValidator) validator).initialize(constraintAnnotation); ((ConstraintValidator) validator).initialize(constraintAnnotation);
if (list == null) { if (list == null) {
list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>(); list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>();
} }
list.add(validator); list.add(validator);
} }
} }
} }
return list; return list;
} }
public static Optional<IdentityName> getIndexName(Method getterMethod) { public static Optional<IdentityName> getIndexName(Method getterMethod) {
String indexName = null; String indexName = null;
boolean forceQuote = false; boolean forceQuote = false;
Index index = getterMethod.getDeclaredAnnotation(Index.class); Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) { if (index != null) {
indexName = index.value(); indexName = index.value();
forceQuote = index.forceQuote(); forceQuote = index.forceQuote();
if (indexName == null || indexName.isEmpty()) { if (indexName == null || indexName.isEmpty()) {
indexName = getDefaultColumnName(getterMethod); indexName = getDefaultColumnName(getterMethod);
} }
} }
return indexName != null return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty();
? Optional.of(new IdentityName(indexName, forceQuote)) }
: Optional.empty();
}
public static boolean caseSensitiveIndex(Method getterMethod) { public static boolean caseSensitiveIndex(Method getterMethod) {
Index index = getterMethod.getDeclaredAnnotation(Index.class); Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) { if (index != null) {
return index.caseSensitive(); return index.caseSensitive();
} }
return false; return false;
} }
public static String getPropertyName(Method getter) { public static String getPropertyName(Method getter) {
return getter.getName(); return getter.getName();
} }
public static String getDefaultColumnName(Method getter) { public static String getDefaultColumnName(Method getter) {
return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter)); return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
} }
public static IdentityName getUserDefinedTypeName(Class<?> iface, boolean required) { public static IdentityName getUserDefinedTypeName(Class<?> iface, boolean required) {
String userTypeName = null; String userTypeName = null;
boolean forceQuote = false; boolean forceQuote = false;
UDT userDefinedType = iface.getDeclaredAnnotation(UDT.class); UDT userDefinedType = iface.getDeclaredAnnotation(UDT.class);
if (userDefinedType != null) { if (userDefinedType != null) {
userTypeName = userDefinedType.value(); userTypeName = userDefinedType.value();
forceQuote = userDefinedType.forceQuote(); forceQuote = userDefinedType.forceQuote();
if (userTypeName == null || userTypeName.isEmpty()) { if (userTypeName == null || userTypeName.isEmpty()) {
userTypeName = getDefaultEntityName(iface); userTypeName = getDefaultEntityName(iface);
} }
return new IdentityName(userTypeName, forceQuote); return new IdentityName(userTypeName, forceQuote);
} }
if (required) { if (required) {
throw new HelenusMappingException("entity must have annotation @UserDefinedType " + iface); throw new HelenusMappingException("entity must have annotation @UserDefinedType " + iface);
} }
return null; return null;
} }
public static boolean isTuple(Class<?> iface) { public static boolean isTuple(Class<?> iface) {
Tuple tuple = iface.getDeclaredAnnotation(Tuple.class); Tuple tuple = iface.getDeclaredAnnotation(Tuple.class);
return tuple != null; return tuple != null;
} }
public static boolean isUDT(Class<?> iface) { public static boolean isUDT(Class<?> iface) {
UDT udt = iface.getDeclaredAnnotation(UDT.class); UDT udt = iface.getDeclaredAnnotation(UDT.class);
return udt != null; return udt != null;
} }
public static IdentityName getViewName(Class<?> iface, boolean required) { public static IdentityName getViewName(Class<?> iface, boolean required) {
String viewName = null; String viewName = null;
boolean forceQuote = false; boolean forceQuote = false;
MaterializedView view = iface.getDeclaredAnnotation(MaterializedView.class); MaterializedView view = iface.getDeclaredAnnotation(MaterializedView.class);
if (view != null) { if (view != null) {
viewName = view.value(); viewName = view.value();
forceQuote = view.forceQuote(); forceQuote = view.forceQuote();
} else if (required) { } else if (required) {
throw new HelenusMappingException("entity must have annotation @Table " + iface); throw new HelenusMappingException("entity must have annotation @Table " + iface);
} }
if (viewName == null || viewName.isEmpty()) { if (viewName == null || viewName.isEmpty()) {
viewName = getDefaultEntityName(iface); viewName = getDefaultEntityName(iface);
} }
return new IdentityName(viewName, forceQuote); return new IdentityName(viewName, forceQuote);
} }
public static IdentityName getTableName(Class<?> iface, boolean required) { public static IdentityName getTableName(Class<?> iface, boolean required) {
String tableName = null; String tableName = null;
boolean forceQuote = false; boolean forceQuote = false;
Table table = iface.getDeclaredAnnotation(Table.class); Table table = iface.getDeclaredAnnotation(Table.class);
if (table != null) { if (table != null) {
tableName = table.value(); tableName = table.value();
forceQuote = table.forceQuote(); forceQuote = table.forceQuote();
} else if (required) { } else if (required) {
throw new HelenusMappingException("entity must have annotation @Table " + iface); throw new HelenusMappingException("entity must have annotation @Table " + iface);
} }
if (tableName == null || tableName.isEmpty()) { if (tableName == null || tableName.isEmpty()) {
tableName = getDefaultEntityName(iface); tableName = getDefaultEntityName(iface);
} }
return new IdentityName(tableName, forceQuote); return new IdentityName(tableName, forceQuote);
} }
public static String getDefaultEntityName(Class<?> iface) { public static String getDefaultEntityName(Class<?> iface) {
return Helenus.settings().getPropertyToColumnConverter().apply(iface.getSimpleName()); return Helenus.settings().getPropertyToColumnConverter().apply(iface.getSimpleName());
} }
public static Class<?> getMappingInterface(Object pojo) { public static Class<?> getMappingInterface(Object pojo) {
Class<?> iface = null; Class<?> iface = null;
if (pojo instanceof Class) { if (pojo instanceof Class) {
iface = (Class<?>) pojo; iface = (Class<?>) pojo;
if (!iface.isInterface()) { if (!iface.isInterface()) {
throw new HelenusMappingException("expected interface " + iface); throw new HelenusMappingException("expected interface " + iface);
} }
} else { } else {
Class<?>[] ifaces = pojo.getClass().getInterfaces(); Class<?>[] ifaces = pojo.getClass().getInterfaces();
int len = ifaces.length; int len = ifaces.length;
for (int i = 0; i != len; ++i) { for (int i = 0; i != len; ++i) {
 iface = ifaces[i]; iface = ifaces[i];
if (MapExportable.class.isAssignableFrom(iface)) { if (MapExportable.class.isAssignableFrom(iface)) {
continue; continue;
} }
if (iface.getDeclaredAnnotation(Table.class) != null if (iface.getDeclaredAnnotation(Table.class) != null
|| iface.getDeclaredAnnotation(MaterializedView.class) != null || iface.getDeclaredAnnotation(MaterializedView.class) != null
|| iface.getDeclaredAnnotation(UDT.class) != null || iface.getDeclaredAnnotation(UDT.class) != null
|| iface.getDeclaredAnnotation(Tuple.class) != null) { || iface.getDeclaredAnnotation(Tuple.class) != null) {
break; break;
} }
} }
} }
if (iface == null) { if (iface == null) {
throw new HelenusMappingException("dsl interface not found for " + pojo); throw new HelenusMappingException("dsl interface not found for " + pojo);
} }
return iface; return iface;
} }
public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) { public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) {
try { try {
Object childDsl = getter.get(); Object childDsl = getter.get();
if (childDsl instanceof DslExportable) { if (childDsl instanceof DslExportable) {
DslExportable e = (DslExportable) childDsl; DslExportable e = (DslExportable) childDsl;
return e.getParentDslHelenusPropertyNode(); return e.getParentDslHelenusPropertyNode();
} else if (childDsl instanceof MapDsl) { } else if (childDsl instanceof MapDsl) {
MapDsl mapDsl = (MapDsl) childDsl; MapDsl mapDsl = (MapDsl) childDsl;
return mapDsl.getParent(); return mapDsl.getParent();
} else if (childDsl instanceof ListDsl) { } else if (childDsl instanceof ListDsl) {
ListDsl listDsl = (ListDsl) childDsl; ListDsl listDsl = (ListDsl) childDsl;
return listDsl.getParent(); return listDsl.getParent();
} else if (childDsl instanceof SetDsl) { } else if (childDsl instanceof SetDsl) {
SetDsl setDsl = (SetDsl) childDsl; SetDsl setDsl = (SetDsl) childDsl;
return setDsl.getParent(); return setDsl.getParent();
} }
throw new HelenusMappingException("getter must reference to the dsl object " + getter); throw new HelenusMappingException("getter must reference to the dsl object " + getter);
} catch (DslPropertyException e) { } catch (DslPropertyException e) {
return e.getPropertyNode(); return e.getPropertyNode();
} }
} }
} }
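
A sketch of the two resolution paths in getTableName() above. Both interfaces are hypothetical; the derived name for AuditLog depends on the settings' property-to-column converter, which is assumed here to produce an underscored lower-case name, and @Table.value() is assumed to default to an empty string as the other mapping annotations do.

import net.helenus.mapping.IdentityName;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.Table;

public class TableNameSketch {
  @Table("user_accounts")
  interface UserAccount {}

  @Table
  interface AuditLog {}

  public static void main(String[] args) {
    IdentityName explicit = MappingUtil.getTableName(UserAccount.class, true);
    System.out.println(explicit.toCql()); // user_accounts -- taken directly from @Table.value()

    IdentityName derived = MappingUtil.getTableName(AuditLog.class, true);
    // An empty @Table.value() falls back to getDefaultEntityName(), i.e. the settings'
    // property-to-column converter applied to the simple name (assumed: audit_log).
    System.out.println(derived.toCql());
  }
}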

View file

@ -18,28 +18,28 @@ package net.helenus.mapping;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public enum OrderingDirection { public enum OrderingDirection {
ASC("ASC"), ASC("ASC"),
DESC("DESC"); DESC("DESC");
private final String cql; private final String cql;
private OrderingDirection(String cql) { private OrderingDirection(String cql) {
this.cql = cql; this.cql = cql;
} }
public String cql() { public static OrderingDirection parseString(String name) {
return cql;
}
public static OrderingDirection parseString(String name) { if (ASC.cql.equalsIgnoreCase(name)) {
return ASC;
} else if (DESC.cql.equalsIgnoreCase(name)) {
return DESC;
}
if (ASC.cql.equalsIgnoreCase(name)) { throw new HelenusMappingException("invalid ordering direction name " + name);
return ASC; }
} else if (DESC.cql.equalsIgnoreCase(name)) {
return DESC;
}
throw new HelenusMappingException("invalid ordering direction name " + name); public String cql() {
} return cql;
}
} }
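
A tiny sketch of parseString() above: the match against the CQL keyword is case-insensitive, and anything else is rejected.

import net.helenus.mapping.OrderingDirection;

public class OrderingDirectionSketch {
  public static void main(String[] args) {
    OrderingDirection asc = OrderingDirection.parseString("asc");   // case-insensitive -> ASC
    OrderingDirection desc = OrderingDirection.parseString("DESC"); // -> DESC
    System.out.println(asc.cql() + ", " + desc.cql());              // prints: ASC, DESC
    // Any other input throws HelenusMappingException("invalid ordering direction name ...").
  }
}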

View file

@ -18,17 +18,16 @@ package net.helenus.mapping;
import java.util.Comparator; import java.util.Comparator;
public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> { public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> {
INSTANCE; INSTANCE;
public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) { public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {
int c = int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
if (c == 0) { if (c == 0) {
c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal()); c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());
} }
return c; return c;
} }
} }

View file

@ -19,78 +19,93 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import net.helenus.mapping.OrderingDirection; import net.helenus.mapping.OrderingDirection;
/** /**
* ClusteringColumn is the family column in legacy Cassandra API * ClusteringColumn is the family column in legacy Cassandra API
* *
 * <p>The purpose of this column is to have an additional dimension in the table. Both @PartitionKey * <p>
 * and @ClusteringColumn together are parts of the primary key of the table. The primary difference * The purpose of this column is to have an additional dimension in the table.
 * between them is that the first one is used for routing purposes in order to locate a data node * Both @PartitionKey and @ClusteringColumn together are parts of the primary
 * in the cluster, while the second one is used inside the node to locate a piece of data in * key of the table. The primary difference between them is that the first one
 * is used for routing purposes in order to locate a data node in the cluster,
 * while the second one is used inside the node to locate a piece of data in
 * a concrete machine. * a concrete machine.
* *
 * <p>ClusteringColumn can be represented as a Key in a SortedMap that is fully stored in a single node. * <p>
 * All developers must be careful when selecting fields for clustering columns, because all data * ClusteringColumn can be represented as a Key in a SortedMap that is fully stored
 * inside this SortedMap must fit into one node. * in a single node. All developers must be careful when selecting fields for
 * clustering columns, because all data inside this SortedMap must fit into one
 * node.
* *
 * <p>ClusteringColumn can have more than one part and the order of parts is important. This order * <p>
 * defines the way Cassandra joins the parts and influences data retrieval operations. Each * ClusteringColumn can have more than one part and the order of parts is
 * part can have an ordering property that defines the default ascending or descending order of data. In * important. This order defines the way Cassandra joins the parts and
 * the case of two or more parts, in select queries the developer needs to keep a consistent order of all * influences data retrieval operations. Each part can have an ordering property
 * parts as they are defined in the table. * that defines the default ascending or descending order of data. In the case of two
 * or more parts, in select queries the developer needs to keep a consistent order of
 * all parts as they are defined in the table.
* *
 * <p>For example, the first part has ASC ordering and the second is also ASC, so Cassandra will sort entries * <p>
 * like this: a-a a-b b-a b-b In this case we are able to run queries: ORDER BY first ASC, second ASC * For example, the first part has ASC ordering and the second is also ASC, so Cassandra
 * ORDER BY first DESC, second DESC WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second * will sort entries like this: a-a a-b b-a b-b In this case we are able to run
 * DESC WHERE first=? AND second=? * queries: ORDER BY first ASC, second ASC ORDER BY first DESC, second DESC
 * WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second DESC WHERE
 * first=? AND second=?
* *
* <p>But, we can not run queries: ORDER BY first DESC, second ASC ORDER BY first ASC, second DESC * <p>
* WHERE second=? ORDER BY first (ASC,DESC) * But, we can not run queries: ORDER BY first DESC, second ASC ORDER BY first
* ASC, second DESC WHERE second=? ORDER BY first (ASC,DESC)
*/ */
@Retention(value = RetentionPolicy.RUNTIME) @Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface ClusteringColumn { public @interface ClusteringColumn {
/** /**
* Default value is the name of the method normalized to underscore * Default value is the name of the method normalized to underscore
* *
* @return name of the column * @return name of the column
*/ */
String value() default ""; String value() default "";
/** /**
* ClusteringColumn parts must be ordered in the @Table. It is the requirement of Cassandra. * ClusteringColumn parts must be ordered in the @Table. It is the requirement
* Cassandra joins all parts to the final clustering key that is stored in column family name. * of Cassandra. Cassandra joins all parts to the final clustering key that is
 * Additionally all parts can have some ordering (ASC, DESC) that, together with the sequence of parts, * stored in column family name. Additionally all parts can have some ordering
 * determines the key comparison function, so Cassandra stores column family names always in sorted * (ASC, DESC) that, together with the sequence of parts, determines the key comparison function,
 * order. * so Cassandra stores column family names always in sorted order.
* *
 * <p>By default ordinal has the value 0, because in most cases a @Table has a single column for * <p>
 * the ClusteringColumn. If you have 2 or more parts of the ClusteringColumn, then you need to use * By default ordinal has the value 0, because in most cases a @Table has a
 * ordinal() to define the sequence of the parts * single column for the ClusteringColumn. If you have 2 or more parts of the
 * * ClusteringColumn, then you need to use ordinal() to define the sequence of
 * @return number that is used to sort clustering columns * the parts
 */ *
 int ordinal() default 0; * @return number that is used to sort clustering columns
*/
int ordinal() default 0;
/** /**
 * Default order of values in the ClusteringColumn. This ordering is used for comparison of the * Default order of values in the ClusteringColumn. This ordering is used for
 * clustering column values when Cassandra stores them in sorted order. * comparison of the clustering column values when Cassandra stores them in
 * * sorted order.
* <p>Default value is the ascending order *
* * <p>
* @return ascending order or descending order of clustering column values * Default value is the ascending order
*/ *
OrderingDirection ordering() default OrderingDirection.ASC; * @return ascending order or descending order of clustering column values
*/
OrderingDirection ordering() default OrderingDirection.ASC;
/** /**
 * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the * For reserved words in Cassandra we need quotation in CQL queries. This
 * name of the column needs to be quoted. * property marks that the name of the column needs to be quoted.
 * *
 * <p>Default value is false; we are quoting only selected names. * <p>
 * * Default value is false; we are quoting only selected names.
 * @return true if the name has to be quoted *
 */ * @return true if the name has to be quoted
boolean forceQuote() default false; */
boolean forceQuote() default false;
} }
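
A hypothetical mapping interface showing the compound clustering key described in the Javadoc above; the table and column names are illustrative only. Within one userId partition, rows are stored ordered by (day DESC, eventId ASC), and a SELECT may request that order or its exact reverse, but not a mix of the two.

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table("timeline")
interface Timeline {
  @PartitionKey
  UUID userId();

  @ClusteringColumn(ordinal = 0, ordering = OrderingDirection.DESC)
  Date day();

  @ClusteringColumn(ordinal = 1)
  UUID eventId();

  @Column
  String payload();
}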

View file

@ -18,45 +18,51 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*; import java.lang.annotation.*;
/** /**
* Column annotation is used to define additional properties of the column in entity mapping * Column annotation is used to define additional properties of the column in
* interfaces: @Table, @UDT, @Tuple * entity mapping interfaces: @Table, @UDT, @Tuple
* *
 * <p>Column annotation can be used to override the default name of the column or to set up the order of the * <p>
 * columns in the mapping * Column annotation can be used to override the default name of the column or to
 * set up the order of the columns in the mapping
* *
 * <p>Usually for @Table and @UDT types it is not important to define the order of the columns, but * <p>
 * in @Tuple mapping it is required, because a tuple itself represents a sequence of types with a * Usually for @Table and @UDT types it is not important to define the order of the
 * particular order within the table's column * columns, but in @Tuple mapping it is required, because a tuple itself
 * represents a sequence of types with a particular order within the table's
 * column
*/ */
@Documented @Documented
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Column { public @interface Column {
/** /**
* Default value is the name of the method normalized to underscore * Default value is the name of the method normalized to underscore
* *
* @return name of the column * @return name of the column
*/ */
String value() default ""; String value() default "";
/** /**
* Ordinal will be used for ascending sorting of columns * Ordinal will be used for ascending sorting of columns
* *
 * <p>Default value is 0, because not all mapping entities require all fields to have unique * <p>
 * ordinals; only the @Tuple mapping entity requires all of them to be unique. * Default value is 0, because not all mapping entities require all fields to
 * * have unique ordinals; only the @Tuple mapping entity requires all of them to be
 * @return number that is used to sort columns, usually for @Tuple only * unique.
 */ *
 int ordinal() default 0; * @return number that is used to sort columns, usually for @Tuple only
*/
int ordinal() default 0;
/** /**
 * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the * For reserved words in Cassandra we need quotation in CQL queries. This
 * name of the column needs to be quoted. * property marks that the name of the column needs to be quoted.
 * *
 * <p>Default value is false; we are quoting only selected names. * <p>
 * * Default value is false; we are quoting only selected names.
 * @return true if the name has to be quoted *
 */ * @return true if the name has to be quoted
boolean forceQuote() default false; */
boolean forceQuote() default false;
} }
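
For @Tuple entities the ordinal() described above is what fixes the position of each field. A hypothetical example:

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Tuple;

// Hypothetical tuple type: a tuple is positional, so unique ordinals are required;
// latitude becomes component 0 of the tuple and longitude component 1.
@Tuple
interface GeoPoint {
  @Column(ordinal = 0)
  double latitude();

  @Column(ordinal = 1)
  double longitude();
}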

View file

@ -16,200 +16,240 @@
package net.helenus.mapping.annotation; package net.helenus.mapping.annotation;
import java.lang.annotation.*; import java.lang.annotation.*;
import javax.validation.Constraint; import javax.validation.Constraint;
import net.helenus.mapping.validator.*; import net.helenus.mapping.validator.*;
/** /**
 * Constraint annotations are used for data integrity, mostly for @java.lang.String types. The place * Constraint annotations are used for data integrity, mostly
 * of the annotation is the particular method in the model interface. * for @java.lang.String types. The place of the annotation is the particular
 * method in the model interface.
 * *
 * <p>None of them has any effect on selects and data retrieval operations. * <p>
 * * None of them has any effect on selects and data retrieval operations.
 * *
 * <p>Supported types: - @NotNull supports any @java.lang.Object type - All annotations * <p>
 * support @java.lang.String type * Supported types: - @NotNull supports any @java.lang.Object type - All
 * annotations support @java.lang.String type
*/ */
public final class Constraints { public final class Constraints {
private Constraints() {} private Constraints() {
}
/** /**
 * NotNull annotation is used to check that the value is not null before storing it * NotNull annotation is used to check that the value is not null before storing it
* *
* <p>Applicable to use in any @java.lang.Object * <p>
* * Applicable to use in any @java.lang.Object
* <p>It does not check on selects and data retrieval operations *
*/ * <p>
@Documented * It does not check on selects and data retrieval operations
@Retention(RetentionPolicy.RUNTIME) */
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Documented
@Constraint(validatedBy = NotNullValidator.class) @Retention(RetentionPolicy.RUNTIME)
public @interface NotNull {} @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotNullValidator.class)
public @interface NotNull {
}
/** /**
 * NotEmpty annotation is used to check that the value has text before storing it * NotEmpty annotation is used to check that the value has text before storing it
* *
 * <p>Also checks for null and is a stricter annotation than @NotNull * <p>
 * * Also checks for null and is a stricter annotation than @NotNull
* <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array *
* * <p>
* <p>It does not check on selects and data retrieval operations * Can be used for @java.lang.CharSequence, @ByteBuffer and any array
*/ *
@Documented * <p>
@Retention(RetentionPolicy.RUNTIME) * It does not check on selects and data retrieval operations
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) */
@Constraint(validatedBy = NotEmptyValidator.class) @Documented
public @interface NotEmpty {} @Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotEmptyValidator.class)
public @interface NotEmpty {
}
/** /**
* Email annotation is using to check that value has a valid email before storing it * Email annotation is using to check that value has a valid email before
* * storing it
* <p>Can be used only for @CharSequence *
* * <p>
* <p>It does not check on selects and data retrieval operations * Can be used only for @CharSequence
*/ *
@Documented * <p>
@Retention(RetentionPolicy.RUNTIME) * It does not check on selects and data retrieval operations
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) */
@Constraint(validatedBy = EmailValidator.class) @Documented
public @interface Email {} @Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = EmailValidator.class)
public @interface Email {
}
/** /**
* Number annotation is using to check that all letters in value are digits before storing it * Number annotation is using to check that all letters in value are digits
* * before storing it
* <p>Can be used only for @java.lang.CharSequence *
* * <p>
* <p>It does not check on selects and data retrieval operations * Can be used only for @java.lang.CharSequence
*/ *
@Documented * <p>
@Retention(RetentionPolicy.RUNTIME) * It does not check on selects and data retrieval operations
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) */
@Constraint(validatedBy = NumberValidator.class) @Documented
public @interface Number {} @Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NumberValidator.class)
public @interface Number {
}
/** /**
* Alphabet annotation is using to check that all letters in value are in specific alphabet before * Alphabet annotation is using to check that all letters in value are in
* storing it * specific alphabet before storing it
* *
* <p>Can be used only for @java.lang.CharSequence * <p>
* * Can be used only for @java.lang.CharSequence
* <p>It does not check on selects and data retrieval operations *
*/ * <p>
@Documented * It does not check on selects and data retrieval operations
@Retention(RetentionPolicy.RUNTIME) */
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Documented
@Constraint(validatedBy = AlphabetValidator.class) @Retention(RetentionPolicy.RUNTIME)
public @interface Alphabet { @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = AlphabetValidator.class)
public @interface Alphabet {
/** /**
* Defines alphabet that will be used to check value * Defines alphabet that will be used to check value
* *
* @return alphabet characters in the string * @return alphabet characters in the string
*/ */
String value(); String value();
} }
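
  // Usage sketch (hypothetical property, not part of this file): restrict a "slug"
  // column to lower-case ASCII letters, digits and '-' characters.
  //
  //   @Constraints.Alphabet("abcdefghijklmnopqrstuvwxyz0123456789-")
  //   String slug();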
  /**
   * Length annotation is used to ensure that a value has an exact length before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LengthValidator.class)
  public @interface Length {

    int value();
  }

  /**
   * MaxLength annotation is used to ensure that a value has a length less than or equal to some
   * threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = MaxLengthValidator.class)
  public @interface MaxLength {

    int value();
  }

  /**
   * MinLength annotation is used to ensure that a value has a length greater than or equal to
   * some threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = MinLengthValidator.class)
  public @interface MinLength {

    int value();
  }
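
  // Usage sketch (hypothetical properties and limits, not part of this file):
  //
  //   @Constraints.Length(2)       // exactly two characters, e.g. an ISO country code
  //   String countryCode();
  //
  //   @Constraints.MinLength(8)
  //   @Constraints.MaxLength(64)   // length bounded on both sides
  //   String nickname();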
  /**
   * LowerCase annotation is used to ensure that a value is in lower case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LowerCaseValidator.class)
  public @interface LowerCase {}

  /**
   * UpperCase annotation is used to ensure that a value is in upper case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = UpperCaseValidator.class)
  public @interface UpperCase {}

  /**
   * Pattern annotation is used to ensure that a value matches a user-defined regular expression
   * before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It has no effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = PatternValidator.class)
  public @interface Pattern {

    /**
     * User-defined regex expression to check the value against.
     *
     * @return Java regex pattern
     */
    String value();

    /**
     * Regex flags composition.
     *
     * @return Java regex flags
     */
    int flags();
  }
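
  // Usage sketch (hypothetical property and pattern, not part of this file); flags
  // is a bitwise OR of java.util.regex.Pattern flag constants:
  //
  //   @Constraints.Pattern(value = "^[a-z0-9_]+$", flags = java.util.regex.Pattern.CASE_INSENSITIVE)
  //   String username();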
}
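
Constraint annotations are placed directly on the getters of a Helenus model interface. The sketch below is illustrative only: the Account interface and its properties are hypothetical, and @Table and @PartitionKey are assumed to be the usual Helenus mapping annotations from this same package; only the nested Constraints annotations defined above come from this file.

import java.util.UUID;

import net.helenus.mapping.annotation.*;

// Hypothetical model interface, shown only to illustrate where the constraints go.
@Table("accounts")
public interface Account {

  @PartitionKey
  UUID id();

  @Constraints.NotNull
  @Constraints.Email
  String email(); // rejected before storing if not a valid email address

  @Constraints.NotEmpty
  @Constraints.MaxLength(128)
  String displayName(); // must be non-empty and at most 128 characters

  @Constraints.UpperCase
  @Constraints.Length(2)
  String countryCode(); // exactly two upper-case characters, e.g. "US"
}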


@@ -3,48 +3,52 @@ package net.helenus.mapping.annotation;

import java.lang.annotation.*;
/**
 * CoveringIndex annotation is used on an entity interface annotated with @Table.
 *
 * <p>A corresponding materialized view will be created based on the underlying @Table for the
 * specified columns.
 *
 * <p>This is useful when you need to perform IN or SORT/ORDER-BY queries; to do so you need a
 * different materialized table on disk in Cassandra.
 *
 * <p>For each @Table-annotated interface, Helenus will create/update/verify Cassandra
 * materialized views and some indexes if needed on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface CoveringIndex {

  /**
   * Defines the name of the index. By default it is the entity name with the column name as a
   * suffix.
   *
   * @return name of the covering index
   */
  String name() default "";

  /**
   * Set of fields in this entity to replicate in the index.
   *
   * @return array of the string names of the fields
   */
  String[] covering() default "";

  /**
   * Set of fields to use as the partition keys for this projection.
   *
   * @return array of the string names of the fields
   */
  String[] partitionKeys() default "";

  /**
   * Set of fields to use as the clustering columns for this projection.
   *
   * @return array of the string names of the fields
   */
  String[] clusteringColumns() default "";
}
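
A usage sketch for the annotation above. The Order entity and its columns are hypothetical, and @Table and @PartitionKey are assumed to be the usual Helenus mapping annotations from this package; only the attribute names (name, covering, partitionKeys, clusteringColumns) are taken from this file.

import java.util.Date;
import java.util.UUID;

import net.helenus.mapping.annotation.*;

// Hypothetical entity: asks Helenus to maintain a materialized view of orders
// keyed by customer and ordered by creation time, replicating status and total.
@Table("orders")
@CoveringIndex(
    name = "orders_by_customer",
    covering = {"status", "total"},
    partitionKeys = {"customerId"},
    clusteringColumns = {"createdAt"})
public interface Order {

  @PartitionKey
  UUID id();

  UUID customerId();

  Date createdAt();

  String status();

  String total();
}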

Some files were not shown because too many files have changed in this diff.