Merge branch 'develop'
commit dcc0927a4a
232 changed files with 11166 additions and 10234 deletions
NOTES (57)
@@ -1,3 +1,8 @@

--- Cache

// `E` is the type of the Entity class or one of:
// - ResultSet
@@ -315,3 +320,55 @@ begin:
    }
  };
}
----------------------------------
if ("ttl".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
  Getter getter = (Getter) args[0];
  if (getter == null) {
    return false;
  }
  HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
  String getterName = prop.getPropertyName();
  String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
  if (src.containsKey(ttlKeyForProperty)) {
    return src.get(ttlKeyForProperty);
  } else {
    return 0;
  }
}

if ("written".equals(methodName) && method.getParameterCount() == 1 && method.getReturnType() == int.class) {
  Getter getter = (Getter) args[0];
  if (getter == null) {
    return false;
  }
  HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
  String getterName = prop.getPropertyName();
  String ttlKeyForProperty = prop.getColumnName().toCql() + "_ttl";
  if (src.containsKey(ttlKeyForProperty)) {
    return src.get(ttlKeyForProperty);
  } else {
    return 0;
  }
}

-----------------

/*else {
  Cache<String, Object> cache = session.getSessionCache();
  Map<String, Object> rowMap = this.cache.rowMap();
  for (String rowKey : rowMap.keySet()) {
    String keys = flattenFacets(facets);
    for (String key : keys) {
      Object value = cache.getIfPresent(key);
      if (value != null) {
        result = Optional.of(value);
        break;
      }
    }
  }
  cache.put
}
*/
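The notes above are easier to follow in isolation: the entity proxy intercepts a call such as `ttl(getter)` and answers it from a synthetic `<column>_ttl` entry in the row map rather than from a real property. The following is a minimal, self-contained sketch of that dispatch pattern using a plain `java.lang.reflect.Proxy`; the `Entity` interface, the `name_ttl` key, and the hard-coded map are assumptions made for the illustration, not Helenus APIs.

import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;

public class TtlDispatchSketch {

  // Hypothetical entity interface; ttl(...) takes the property name here
  // to keep the sketch free of Helenus' Getter type.
  interface Entity {
    String name();
    int ttl(String propertyName);
  }

  public static void main(String[] args) {
    // Backing row map as the driver might hand it back: the value plus a synthetic "<column>_ttl" entry.
    Map<String, Object> src = new HashMap<>();
    src.put("name", "alice");
    src.put("name_ttl", 86400);

    Entity e = (Entity) Proxy.newProxyInstance(
        Entity.class.getClassLoader(),
        new Class<?>[] {Entity.class},
        (proxy, method, methodArgs) -> {
          String methodName = method.getName();
          // Mirrors the NOTES logic: a one-argument ttl(...) call returning int
          // is answered from the "<column>_ttl" key, defaulting to 0.
          if ("ttl".equals(methodName) && method.getParameterCount() == 1
              && method.getReturnType() == int.class) {
            String ttlKeyForProperty = methodArgs[0] + "_ttl";
            return src.containsKey(ttlKeyForProperty) ? src.get(ttlKeyForProperty) : 0;
          }
          // Any other getter is answered directly from the row map.
          return src.get(methodName);
        });

    System.out.println(e.name());      // alice
    System.out.println(e.ttl("name")); // 86400
  }
}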
@@ -64,7 +64,6 @@ dependencies {
  compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
  compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
  compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'

  compile group: 'com.google.guava', name: 'guava', version: '20.0'
  compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
  compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'
@@ -5,19 +5,19 @@ import java.util.List;

public class DefaultMetadata extends Metadata {

  public DefaultMetadata() {
    super(null);
  }

  private DefaultMetadata(Cluster.Manager cluster) {
    super(cluster);
  }

  public TupleType newTupleType(DataType... types) {
    return newTupleType(Arrays.asList(types));
  }

  public TupleType newTupleType(List<DataType> types) {
    return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
  }
}
@@ -15,34 +15,35 @@
 */
package com.datastax.driver.core.querybuilder;

import java.util.List;

import com.datastax.driver.core.CodecRegistry;

public class IsNotNullClause extends Clause {

  final String name;

  public IsNotNullClause(String name) {
    this.name = name;
  }

  @Override
  String name() {
    return name;
  }

  @Override
  Object firstValue() {
    return null;
  }

  @Override
  void appendTo(StringBuilder sb, List<Object> variables, CodecRegistry codecRegistry) {
    Utils.appendName(name, sb).append(" IS NOT NULL");
  }

  @Override
  boolean containsBindMarker() {
    return false;
  }
}
@@ -1,157 +1,148 @@
package com.datastax.driver.core.schemabuilder;

import static com.datastax.driver.core.schemabuilder.SchemaStatement.*;

import com.google.common.base.Optional;

public class CreateCustomIndex extends CreateIndex {

  private String indexName;
  private boolean ifNotExists = false;
  private Optional<String> keyspaceName = Optional.absent();
  private String tableName;
  private String columnName;
  private boolean keys;

  CreateCustomIndex(String indexName) {
    super(indexName);
    validateNotEmpty(indexName, "Index name");
    validateNotKeyWord(indexName,
        String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
    this.indexName = indexName;
  }

  /**
   * Add the 'IF NOT EXISTS' condition to this CREATE INDEX statement.
   *
   * @return this CREATE INDEX statement.
   */
  public CreateIndex ifNotExists() {
    this.ifNotExists = true;
    return this;
  }

  /**
   * Specify the keyspace and table to create the index on.
   *
   * @param keyspaceName the keyspace name.
   * @param tableName the table name.
   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
   */
  public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
    validateNotEmpty(keyspaceName, "Keyspace name");
    validateNotEmpty(tableName, "Table name");
    validateNotKeyWord(keyspaceName,
        String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
    validateNotKeyWord(tableName,
        String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
    this.keyspaceName = Optional.fromNullable(keyspaceName);
    this.tableName = tableName;
    return new CreateCustomIndex.CreateIndexOn();
  }

  /**
   * Specify the table to create the index on.
   *
   * @param tableName the table name.
   * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
   */
  public CreateIndex.CreateIndexOn onTable(String tableName) {
    validateNotEmpty(tableName, "Table name");
    validateNotKeyWord(tableName,
        String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
    this.tableName = tableName;
    return new CreateCustomIndex.CreateIndexOn();
  }

  String getCustomClassName() {
    return "";
  }

  String getOptions() {
    return "";
  }

  @Override
  public String buildInternal() {
    StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");

    if (ifNotExists) {
      createStatement.append("IF NOT EXISTS ");
    }

    createStatement.append(indexName).append(" ON ");

    if (keyspaceName.isPresent()) {
      createStatement.append(keyspaceName.get()).append(".");
    }
    createStatement.append(tableName);

    createStatement.append("(");
    if (keys) {
      createStatement.append("KEYS(");
    }

    createStatement.append(columnName);

    if (keys) {
      createStatement.append(")");
    }
    createStatement.append(")");

    createStatement.append(" USING '");
    createStatement.append(getCustomClassName());
    createStatement.append("' WITH OPTIONS = {");
    createStatement.append(getOptions());
    createStatement.append(" }");

    return createStatement.toString();
  }

  public class CreateIndexOn extends CreateIndex.CreateIndexOn {
    /**
     * Specify the column to create the index on.
     *
     * @param columnName the column name.
     * @return the final CREATE INDEX statement.
     */
    public SchemaStatement andColumn(String columnName) {
      validateNotEmpty(columnName, "Column name");
      validateNotKeyWord(columnName,
          String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
      CreateCustomIndex.this.columnName = columnName;
      return SchemaStatement.fromQueryString(buildInternal());
    }

    /**
     * Create an index on the keys of the given map column.
     *
     * @param columnName the column name.
     * @return the final CREATE INDEX statement.
     */
    public SchemaStatement andKeysOfColumn(String columnName) {
      validateNotEmpty(columnName, "Column name");
      validateNotKeyWord(columnName,
          String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
      CreateCustomIndex.this.columnName = columnName;
      CreateCustomIndex.this.keys = true;
      return SchemaStatement.fromQueryString(buildInternal());
    }
  }
}
@@ -5,49 +5,48 @@ import com.datastax.driver.core.querybuilder.Select;

public class CreateMaterializedView extends Create {

  private String viewName;
  private Select.Where selection;
  private String primaryKey;
  private String clustering;

  public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
      String clustering) {
    super(keyspaceName, viewName);
    this.viewName = viewName;
    this.selection = selection;
    this.primaryKey = primaryKey;
    this.clustering = clustering;
  }

  public String getQueryString(CodecRegistry codecRegistry) {
    return buildInternal();
  }

  public String buildInternal() {
    StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
    if (ifNotExists) {
      createStatement.append(" IF NOT EXISTS");
    }
    createStatement.append(" ");
    if (keyspaceName.isPresent()) {
      createStatement.append(keyspaceName.get()).append(".");
    }
    createStatement.append(viewName);
    createStatement.append(" AS ");
    createStatement.append(selection.getQueryString());
    createStatement.setLength(createStatement.length() - 1);
    createStatement.append(" ");
    createStatement.append(primaryKey);
    if (clustering != null) {
      createStatement.append(" ").append(clustering);
    }
    createStatement.append(";");

    return createStatement.toString();
  }

  public String toString() {
    return buildInternal();
  }
}
@@ -2,17 +2,16 @@ package com.datastax.driver.core.schemabuilder;

public class CreateSasiIndex extends CreateCustomIndex {

  public CreateSasiIndex(String indexName) {
    super(indexName);
  }

  String getCustomClassName() {
    return "org.apache.cassandra.index.sasi.SASIIndex";
  }

  String getOptions() {
    return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
        + "'case_sensitive': 'false'";
  }
}
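As a rough illustration of what `buildInternal()` assembles for the SASI subclass above, the sketch below chains the public builder methods and shows the expected CQL shape in a comment. The keyspace, table, and column names are invented for the example, and it assumes the statement returned by `andColumn(...)` exposes the driver's inherited `getQueryString()`.

import com.datastax.driver.core.schemabuilder.CreateSasiIndex;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

public class CreateSasiIndexExample {
  public static void main(String[] args) {
    // andColumn(...) triggers buildInternal() and wraps the result in a SchemaStatement.
    SchemaStatement stmt = new CreateSasiIndex("name_idx")
        .onTable("examples", "person")
        .andColumn("name");

    // Expected shape of the generated CQL (whitespace aside):
    // CREATE CUSTOM INDEX name_idx ON examples.person(name)
    //   USING 'org.apache.cassandra.index.sasi.SASIIndex'
    //   WITH OPTIONS = {'analyzer_class': 'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', 'case_sensitive': 'false' }
    System.out.println(stmt.getQueryString());
  }
}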
@@ -20,19 +20,19 @@ import com.datastax.driver.core.CodecRegistry;
/** A built CREATE TABLE statement. */
public class CreateTable extends Create {

  public CreateTable(String keyspaceName, String tableName) {
    super(keyspaceName, tableName);
  }

  public CreateTable(String tableName) {
    super(tableName);
  }

  public String getQueryString(CodecRegistry codecRegistry) {
    return buildInternal();
  }

  public String toString() {
    return buildInternal();
  }
}
@@ -4,50 +4,46 @@ import com.google.common.base.Optional;

public class DropMaterializedView extends Drop {

  private Optional<String> keyspaceName = Optional.absent();
  private String itemName;
  private boolean ifExists = true;
  private final String itemType = "MATERIALIZED VIEW";

  public DropMaterializedView(String keyspaceName, String viewName) {
    this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
  }

  private DropMaterializedView(String keyspaceName, String viewName, DroppedItem itemType) {
    super(keyspaceName, viewName, Drop.DroppedItem.TABLE);
    validateNotEmpty(keyspaceName, "Keyspace name");
    this.keyspaceName = Optional.fromNullable(keyspaceName);
    this.itemName = viewName;
  }

  /**
   * Add the 'IF EXISTS' condition to this DROP statement.
   *
   * @return this statement.
   */
  public Drop ifExists() {
    this.ifExists = true;
    return this;
  }

  @Override
  public String buildInternal() {
    StringBuilder dropStatement = new StringBuilder("DROP " + itemType + " ");
    if (ifExists) {
      dropStatement.append("IF EXISTS ");
    }
    if (keyspaceName.isPresent()) {
      dropStatement.append(keyspaceName.get()).append(".");
    }

    dropStatement.append(itemName);
    return dropStatement.toString();
  }

  enum DroppedItem {
    TABLE, TYPE, INDEX, MATERIALIZED_VIEW
  }
}
@@ -17,6 +17,7 @@ package net.helenus.config;

import java.lang.reflect.Method;
import java.util.function.Function;

import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
import net.helenus.core.reflect.ReflectionDslInstantiator;

@@ -25,23 +26,23 @@ import net.helenus.mapping.convert.CamelCaseToUnderscoreConverter;

public class DefaultHelenusSettings implements HelenusSettings {

  @Override
  public Function<String, String> getPropertyToColumnConverter() {
    return CamelCaseToUnderscoreConverter.INSTANCE;
  }

  @Override
  public Function<Method, Boolean> getGetterMethodDetector() {
    return GetterMethodDetector.INSTANCE;
  }

  @Override
  public DslInstantiator getDslInstantiator() {
    return ReflectionDslInstantiator.INSTANCE;
  }

  @Override
  public MapperInstantiator getMapperInstantiator() {
    return ReflectionMapperInstantiator.INSTANCE;
  }
}
@@ -18,31 +18,32 @@ package net.helenus.config;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.function.Function;

import net.helenus.mapping.annotation.Transient;

public enum GetterMethodDetector implements Function<Method, Boolean> {
  INSTANCE;

  @Override
  public Boolean apply(Method method) {

    if (method == null) {
      throw new IllegalArgumentException("empty parameter");
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      return false;
    }

    if (Modifier.isStatic(method.getModifiers())) {
      return false;
    }

    // Methods marked "Transient" are not mapped, skip them.
    if (method.getDeclaredAnnotation(Transient.class) != null) {
      return false;
    }

    return true;
  }
}
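A quick, self-contained check of the rules above: the detector accepts a no-argument, non-void, non-static instance method and rejects anything annotated with @Transient. The `Account` interface below is a made-up example for illustration, not part of the changeset.

import java.lang.reflect.Method;

import net.helenus.config.GetterMethodDetector;
import net.helenus.mapping.annotation.Transient;

public class GetterDetectorExample {

  // Hypothetical mapped interface used only for this illustration.
  interface Account {
    String owner();                 // plain getter: accepted

    @Transient
    String cachedDisplayName();     // marked @Transient: rejected

    void touch();                   // void return type: rejected
  }

  public static void main(String[] args) throws NoSuchMethodException {
    Method owner = Account.class.getMethod("owner");
    Method cached = Account.class.getMethod("cachedDisplayName");
    Method touch = Account.class.getMethod("touch");

    System.out.println(GetterMethodDetector.INSTANCE.apply(owner));  // true
    System.out.println(GetterMethodDetector.INSTANCE.apply(cached)); // false
    System.out.println(GetterMethodDetector.INSTANCE.apply(touch));  // false
  }
}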
@@ -17,16 +17,17 @@ package net.helenus.config;

import java.lang.reflect.Method;
import java.util.function.Function;

import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;

public interface HelenusSettings {

  Function<String, String> getPropertyToColumnConverter();

  Function<Method, Boolean> getGetterMethodDetector();

  DslInstantiator getDslInstantiator();

  MapperInstantiator getMapperInstantiator();
}
@@ -3,36 +3,37 @@ package net.helenus.core;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;

import net.helenus.core.reflect.MapExportable;

public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {

  public AbstractAuditedEntityDraft(MapExportable entity) {
    super(entity);

    Date in = new Date();
    LocalDateTime ldt = LocalDateTime.ofInstant(in.toInstant(), ZoneId.systemDefault());
    Date now = Date.from(ldt.atZone(ZoneId.systemDefault()).toInstant());

    String who = getCurrentAuditor();

    if (entity == null) {
      if (who != null) {
        set("createdBy", who);
      }
      set("createdAt", now);
    }
    if (who != null) {
      set("modifiedBy", who);
    }
    set("modifiedAt", now);
  }

  protected String getCurrentAuditor() {
    return null;
  }

  public Date createdAt() {
    return (Date) get("createdAt", Date.class);
  }
}
@@ -1,7 +1,12 @@
package net.helenus.core;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import com.google.common.primitives.Primitives;

import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.MapExportable;

@@ -9,151 +14,151 @@ import net.helenus.mapping.MappingUtil;

public abstract class AbstractEntityDraft<E> implements Drafted<E> {

  private final Map<String, Object> backingMap = new HashMap<String, Object>();
  private final MapExportable entity;
  private final Map<String, Object> entityMap;

  public AbstractEntityDraft(MapExportable entity) {
    this.entity = entity;
    this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>();
  }

  public abstract Class<E> getEntityClass();

  public E build() {
    return Helenus.map(getEntityClass(), toMap());
  }

  @SuppressWarnings("unchecked")
  protected <T> T get(Getter<T> getter, Class<?> returnType) {
    return (T) get(this.<T>methodNameFor(getter), returnType);
  }

  @SuppressWarnings("unchecked")
  protected <T> T get(String key, Class<?> returnType) {
    T value = (T) backingMap.get(key);

    if (value == null) {
      value = (T) entityMap.get(key);
      if (value == null) {

        if (Primitives.allPrimitiveTypes().contains(returnType)) {

          DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
          if (type == null) {
            throw new RuntimeException("unknown primitive type " + returnType);
          }

          return (T) type.getDefaultValue();
        }
      }
    }

    return value;
  }

  protected <T> Object set(Getter<T> getter, Object value) {
    return set(this.<T>methodNameFor(getter), value);
  }

  protected Object set(String key, Object value) {
    if (key == null || value == null) {
      return null;
    }

    backingMap.put(key, value);
    return value;
  }

  @SuppressWarnings("unchecked")
  protected <T> T mutate(Getter<T> getter, T value) {
    return (T) mutate(this.<T>methodNameFor(getter), value);
  }

  protected Object mutate(String key, Object value) {
    Objects.requireNonNull(key);

    if (value == null) {
      return null;
    }

    if (entity != null) {
      Map<String, Object> map = entity.toMap();

      if (map.containsKey(key) && !value.equals(map.get(key))) {
        backingMap.put(key, value);
        return value;
      }

      return map.get(key);
    } else {
      backingMap.put(key, value);

      return null;
    }
  }

  private <T> String methodNameFor(Getter<T> getter) {
    return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();
  }

  public <T> Object unset(Getter<T> getter) {
    return unset(methodNameFor(getter));
  }

  public Object unset(String key) {
    if (key != null) {
      Object value = backingMap.get(key);
      backingMap.put(key, null);
      return value;
    }
    return null;
  }

  public <T> boolean reset(Getter<T> getter, T desiredValue) {
    return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
  }

  public <T> boolean reset(String key, T desiredValue) {
    if (key != null && desiredValue != null) {
      @SuppressWarnings("unchecked")
      T currentValue = (T) backingMap.get(key);
      if (currentValue == null || !currentValue.equals(desiredValue)) {
        set(key, desiredValue);
        return true;
      }
    }
    return false;
  }

  @Override
  public Map<String, Object> toMap() {
    return toMap(entityMap);
  }

  public Map<String, Object> toMap(Map<String, Object> entityMap) {
    Map<String, Object> combined;
    if (entityMap != null && entityMap.size() > 0) {
      combined = new HashMap<String, Object>(entityMap.size());
      for (String key : entityMap.keySet()) {
        combined.put(key, entityMap.get(key));
      }
    } else {
      combined = new HashMap<String, Object>(backingMap.size());
    }
    for (String key : mutated()) {
      combined.put(key, backingMap.get(key));
    }
    return combined;
  }

  @Override
  public Set<String> mutated() {
    return backingMap.keySet();
  }

  @Override
  public String toString() {
    return backingMap.toString();
  }
}
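To make the draft lifecycle concrete, here is a minimal hypothetical subclass and how its backing map evolves. `Person` and `PersonDraft` are invented for the example; `build()` is left out because it needs a configured Helenus session, so the sketch only exercises the protected `set(...)` helper plus `toMap()` and `mutated()`.

import java.util.Map;

import net.helenus.core.AbstractEntityDraft;

public class PersonDraftExample {

  // Hypothetical entity interface; a real one would be a Helenus-mapped interface.
  interface Person {
    String name();
    int age();
  }

  // Draft over no existing entity (null), so every set(...) lands in the backing map.
  static class PersonDraft extends AbstractEntityDraft<Person> {
    PersonDraft() {
      super(null);
    }

    @Override
    public Class<Person> getEntityClass() {
      return Person.class;
    }

    PersonDraft name(String name) {
      set("name", name); // protected helper from AbstractEntityDraft
      return this;
    }

    PersonDraft age(int age) {
      set("age", age);
      return this;
    }
  }

  public static void main(String[] args) {
    PersonDraft draft = new PersonDraft().name("Alice").age(40);
    System.out.println(draft.toMap());   // the combined entity + draft values, e.g. {name=Alice, age=40}
    System.out.println(draft.mutated()); // the keys touched since the draft was created
  }
}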
@@ -15,112 +15,128 @@
 */
package net.helenus.core;

import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.Executor;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.collect.Table;
import com.google.common.util.concurrent.ListenableFuture;

import brave.Tracer;
import net.helenus.core.cache.Facet;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;

public abstract class AbstractSessionOperations {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractSessionOperations.class);

  public abstract Session currentSession();

  public abstract String usingKeyspace();

  public abstract boolean isShowCql();

  public abstract PrintStream getPrintStream();

  public abstract Executor getExecutor();

  public abstract SessionRepository getSessionRepository();

  public abstract ColumnValueProvider getValueProvider();

  public abstract ColumnValuePreparer getValuePreparer();

  public abstract ConsistencyLevel getDefaultConsistencyLevel();

  public abstract boolean getDefaultQueryIdempotency();

  public PreparedStatement prepare(RegularStatement statement) {
    try {
      log(statement, false);
      return currentSession().prepare(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    try {
      log(statement, false);
      return currentSession().prepareAsync(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  public ResultSet execute(Statement statement, boolean showValues) {
    return executeAsync(statement, showValues).getUninterruptibly();
  }

  public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
    try {
      log(statement, showValues);
      return currentSession().executeAsync(statement);
    } catch (RuntimeException e) {
      throw translateException(e);
    }
  }

  void log(Statement statement, boolean showValues) {
    if (LOG.isInfoEnabled()) {
      LOG.info("Execute statement " + statement);
    }
    if (isShowCql()) {
      if (statement instanceof BuiltStatement) {
        BuiltStatement builtStatement = (BuiltStatement) statement;
        if (showValues) {
          RegularStatement regularStatement = builtStatement.setForceNoValues(true);
          printCql(regularStatement.getQueryString());
        } else {
          printCql(builtStatement.getQueryString());
        }
      } else if (statement instanceof RegularStatement) {
        RegularStatement regularStatement = (RegularStatement) statement;
        printCql(regularStatement.getQueryString());
      } else {
        printCql(statement.toString());
      }
    }
  }

  public Tracer getZipkinTracer() {
    return null;
  }

  public MetricRegistry getMetricRegistry() {
    return null;
  }

  public void mergeCache(Table<String, String, Object> cache) {
  }

  RuntimeException translateException(RuntimeException e) {
    if (e instanceof HelenusException) {
      return e;
    }
    throw new HelenusException(e);
  }

  public Object checkCache(String tableName, List<Facet> facets) {
    return null;
  }

  public void updateCache(Object pojo, List<Facet> facets) {
  }

  void printCql(String cql) {
    getPrintStream().println(cql);
  }
}
@@ -15,163 +15,239 @@
 */
package net.helenus.core;

import java.util.*;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;

import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);

  private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
  private final HelenusSession session;
  private final AbstractUnitOfWork<E> parent;
  // Cache:
  private final Table<String, String, Object> cache = HashBasedTable.create();
  private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
  private boolean aborted = false;
  private boolean committed = false;
  private String purpose_;
  private Stopwatch elapsedTime_;
  private Stopwatch databaseTime_ = Stopwatch.createUnstarted();
  private Stopwatch cacheLookupTime_ = Stopwatch.createUnstarted();

  protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
    Objects.requireNonNull(session, "containing session cannot be null");

    this.session = session;
    this.parent = parent;
  }

  @Override
  public Stopwatch getExecutionTimer() {
    return databaseTime_;
  }

  @Override
  public Stopwatch getCacheLookupTimer() {
    return cacheLookupTime_;
  }

  @Override
  public void addNestedUnitOfWork(UnitOfWork<E> uow) {
    synchronized (nested) {
      nested.add((AbstractUnitOfWork<E>) uow);
    }
  }

  @Override
  public UnitOfWork<E> begin() {
    elapsedTime_ = Stopwatch.createStarted();
    // log.record(txn::start)
    return this;
  }

  @Override
  public UnitOfWork setPurpose(String purpose) {
    purpose_ = purpose;
    return this;
  }

  public void logTimers(String what) {
    double e = (double) elapsedTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
    double d = (double) databaseTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
    double c = (double) cacheLookupTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
    double fd = (d / (e - c)) * 100.0;
    double fc = (c / (e - d)) * 100.0;
    LOG.info(String.format("UOW(%s)%s %s (total: %.3fms cache: %.3fms %2.2f%% db: %.3fms %2.2f%%)", hashCode(),
        (purpose_ == null ? "" : " " + purpose_), what, e, c, fc, d, fd));
  }

  private void applyPostCommitFunctions() {
    if (!postCommit.isEmpty()) {
      for (CommitThunk f : postCommit) {
        f.apply();
      }
    }
  }

  // TODO(gburd): union this cache with parent's (if there is a parent) or with the session cache
  // for all cacheable entities we currently hold
  @Override
  public Optional<Object> cacheLookup(List<Facet> facets) {
    String tableName = CacheUtil.schemaName(facets);
    Optional<Object> result = Optional.empty();
    for (Facet facet : facets) {
      if (!facet.fixed()) {
        String columnName = facet.name() + "==" + facet.value();
        Object value = cache.get(tableName, columnName);
        if (value != null) {
          if (result.isPresent() && result.get() != value) {
            // One facet matched, but another did not.
            result = Optional.empty();
            break;
          } else {
            result = Optional.of(value);
          }
        }
      }
    }
    if (!result.isPresent()) {
      // Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
      if (parent != null) {
        return parent.cacheLookup(facets);
      }
    }
    return result;
  }

  @Override
  public void cacheUpdate(Object value, List<Facet> facets) {
    Facet table = facets.remove(0);
    String tableName = table.value().toString();
    for (Facet facet : facets) {
      String columnName = facet.name() + "==" + facet.value();
      cache.put(tableName, columnName, value);
    }
  }

  private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
    return nested.iterator();
  }

  /**
   * Checks to see if the work performed between calling begin and now can be
   * committed or not.
   *
   * @return a function from which to chain work that only happens when commit is successful
   * @throws E when the work overlaps with other concurrent writers.
   */
  public PostCommitFunction<Void, Void> commit() throws E {
    // All nested UnitOfWork should be committed (not aborted) before calls to commit, check.
    boolean canCommit = true;
    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
    for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
      if (this != uow) {
        canCommit &= (!uow.aborted && uow.committed);
      }
    }

    // log.record(txn::provisionalCommit)
    // examine log for conflicts in read-set and write-set between begin and provisional commit
    // if (conflict) { throw new ConflictingUnitOfWorkException(this) }
    // else return function so as to enable commit.andThen(() -> { do something iff commit was successful; })

    if (canCommit) {
      committed = true;
      aborted = false;

      nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));

      // Merge UOW cache into parent's cache.
      if (parent != null) {
        parent.mergeCache(cache);
      } else {
        session.mergeCache(cache);
      }
      elapsedTime_.stop();
      logTimers("committed");

      // Apply all post-commit functions for
      if (parent == null) {
        traverser.postOrderTraversal(this).forEach(uow -> {
          uow.applyPostCommitFunctions();
        });
        return new PostCommitFunction(this, null);
      }
    }
    // else {
    // Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
    // T object = ctor.newInstance(new Object[] { String message });
    // }
    return new PostCommitFunction(this, postCommit);
  }

  /* Explicitly discard the work and mark it as as such in the log. */
  public void abort() {
    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
    traverser.postOrderTraversal(this).forEach(uow -> {
      uow.committed = false;
      uow.aborted = true;
    });
    // log.record(txn::abort)
    // cache.invalidateSince(txn::start time)
    if (!hasAborted()) {
      elapsedTime_.stop();
      logTimers("aborted");
    }
  }

  private void mergeCache(Table<String, String, Object> from) {
    Table<String, String, Object> to = this.cache;
    from.rowMap().forEach((rowKey, columnMap) -> {
      columnMap.forEach((columnKey, value) -> {
        if (to.contains(rowKey, columnKey)) {
          to.put(rowKey, columnKey, CacheUtil.merge(to.get(rowKey, columnKey), from.get(rowKey, columnKey)));
        } else {
          to.put(rowKey, columnKey, from.get(rowKey, columnKey));
        }
      });
    });
  }

  public String describeConflicts() {
    return "it's complex...";
  }

  @Override
  public void close() throws E {
    // Closing a AbstractUnitOfWork will abort iff we've not already aborted or
    // committed this unit of work.
    if (aborted == false && committed == false) {
      abort();
    }
  }

  public boolean hasAborted() {
    return aborted;
  }

  public boolean hasCommitted() {
    return committed;
  }
}
|
||||
|
|
|
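Usage note (a minimal sketch, not part of the diff above: begin() and AutoCloseable support on UnitOfWork are assumed from the rest of this commit, and the body of the block is illustrative only):

    // Open a unit of work, do some work, then commit; andThen(...) runs only
    // when the commit (and every nested unit of work) actually succeeded.
    try (UnitOfWork uow = session.begin()) {
      // ... reads and writes against the session, passing uow where supported ...
      uow.commit().andThen(() -> {
        System.out.println("committed");
      });
    } // close() aborts iff neither commit() nor abort() was called first
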
@@ -16,8 +16,5 @@
package net.helenus.core;

public enum AutoDdl {
  VALIDATE, UPDATE, CREATE, CREATE_DROP;
}

@@ -1,7 +1,6 @@
package net.helenus.core;

@FunctionalInterface
public interface CommitThunk {
  void apply();
}

@@ -2,9 +2,9 @@ package net.helenus.core;

public class ConflictingUnitOfWorkException extends Exception {

  final UnitOfWork uow;

  ConflictingUnitOfWorkException(UnitOfWork uow) {
    this.uow = uow;
  }
}

@@ -15,15 +15,13 @@
 */
package net.helenus.core;

import java.util.Optional;

import com.datastax.driver.core.Metadata;

import net.helenus.core.reflect.HelenusPropertyNode;

public interface DslInstantiator {

  <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
}

@@ -15,97 +15,102 @@
 */
package net.helenus.core;

import java.util.Objects;

import com.datastax.driver.core.querybuilder.Clause;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.ColumnValuePreparer;

public final class Filter<V> {

  private final HelenusPropertyNode node;
  private final Postulate<V> postulate;

  private Filter(HelenusPropertyNode node, Postulate<V> postulate) {
    this.node = node;
    this.postulate = postulate;
  }

  public static <V> Filter<V> equal(Getter<V> getter, V val) {
    return create(getter, Operator.EQ, val);
  }

  public static <V> Filter<V> in(Getter<V> getter, V... vals) {
    Objects.requireNonNull(getter, "empty getter");
    Objects.requireNonNull(vals, "empty values");

    if (vals.length == 0) {
      throw new IllegalArgumentException("values array is empty");
    }

    for (int i = 0; i != vals.length; ++i) {
      Objects.requireNonNull(vals[i], "value[" + i + "] is empty");
    }

    HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

    Postulate<V> postulate = Postulate.of(Operator.IN, vals);

    return new Filter<V>(node, postulate);
  }

  public static <V> Filter<V> greaterThan(Getter<V> getter, V val) {
    return create(getter, Operator.GT, val);
  }

  public static <V> Filter<V> lessThan(Getter<V> getter, V val) {
    return create(getter, Operator.LT, val);
  }

  public static <V> Filter<V> greaterThanOrEqual(Getter<V> getter, V val) {
    return create(getter, Operator.GTE, val);
  }

  public static <V> Filter<V> lessThanOrEqual(Getter<V> getter, V val) {
    return create(getter, Operator.LTE, val);
  }

  public static <V> Filter<V> create(Getter<V> getter, Postulate<V> postulate) {
    Objects.requireNonNull(getter, "empty getter");
    Objects.requireNonNull(postulate, "empty operator");

    HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

    return new Filter<V>(node, postulate);
  }

  public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
    Objects.requireNonNull(getter, "empty getter");
    Objects.requireNonNull(op, "empty op");
    Objects.requireNonNull(val, "empty value");

    if (op == Operator.IN) {
      throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
    }

    HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);

    Postulate<V> postulate = Postulate.of(op, val);

    return new Filter<V>(node, postulate);
  }

  public HelenusPropertyNode getNode() {
    return node;
  }

  public Clause getClause(ColumnValuePreparer valuePreparer) {
    return postulate.getClause(node, valuePreparer);
  }

  public V[] postulateValues() {
    return postulate.values();
  }

  @Override
  public String toString() {
    return node.getColumnName() + postulate.toString();
  }
}

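For reference, these factories compose into where-clause predicates (a minimal sketch; the timeline entity, its getters, and the value variables are illustrative only, not from this commit):

    // Equality and range predicates resolve the mapped column through the Getter.
    Filter<UUID> byUser = Filter.equal(timeline::userId, userId);
    Filter<Date> since = Filter.greaterThanOrEqual(timeline::date, cutoff);
    // IN has its own factory; Filter.create(getter, Operator.IN, val) throws instead.
    Filter<UUID> anyOf = Filter.in(timeline::userId, id1, id2, id3);
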
@@ -17,5 +17,5 @@ package net.helenus.core;

public interface Getter<V> {

  V get();
}

@@ -15,12 +15,17 @@
 */
package net.helenus.core;

import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;

import net.helenus.config.DefaultHelenusSettings;
import net.helenus.config.HelenusSettings;
import net.helenus.core.reflect.DslExportable;

@@ -30,164 +35,161 @@ import net.helenus.support.HelenusMappingException;

public final class Helenus {

  private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
  private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
  private static volatile HelenusSettings settings = new DefaultHelenusSettings();
  private static volatile HelenusSession singleton;

  private Helenus() {
  }

  protected static void setSession(HelenusSession session) {
    sessions.add(session);
    singleton = session;
  }

  public static HelenusSession session() {
    return singleton;
  }

  public static void shutdown() {
    sessions.forEach((session) -> {
      session.close();
      sessions.remove(session);
    });
    dslCache.clear();
  }

  public static HelenusSettings settings() {
    return settings;
  }

  public static HelenusSettings settings(HelenusSettings overrideSettings) {
    HelenusSettings old = settings;
    settings = overrideSettings;
    return old;
  }

  public static SessionInitializer connect(Cluster cluster) {
    Session session = cluster.connect();
    return new SessionInitializer(session);
  }

  public static SessionInitializer connect(Cluster cluster, String keyspace) {
    Session session = cluster.connect(keyspace);
    return new SessionInitializer(session);
  }

  public static SessionInitializer init(Session session) {

    if (session == null) {
      throw new IllegalArgumentException("empty session");
    }

    return new SessionInitializer(session);
  }

  public static void clearDslCache() {
    dslCache.clear();
  }

  public static <E> E dsl(Class<E> iface) {
    return dsl(iface, null);
  }

  public static <E> E dsl(Class<E> iface, Metadata metadata) {
    return dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
  }

  public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Metadata metadata) {
    return dsl(iface, classLoader, Optional.empty(), metadata);
  }

  public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
      Metadata metadata) {

    Object instance = null;

    if (!parent.isPresent()) {
      instance = dslCache.get(iface);
    }

    if (instance == null) {

      instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent, metadata);

      if (!parent.isPresent()) {

        Object c = dslCache.putIfAbsent(iface, instance);
        if (c != null) {
          instance = c;
        }
      }
    }

    return (E) instance;
  }

  public static <E> E map(Class<E> iface, Map<String, Object> src) {
    return map(iface, src, iface.getClassLoader());
  }

  public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
    return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
  }

  public static HelenusEntity entity(Class<?> iface) {
    Metadata metadata = metadataForEntity.get(iface);
    if (metadata == null) {
      HelenusSession session = session();
      if (session != null) {
        metadata = session.getMetadata();
      }
    }
    return entity(iface, metadata);
  }

  public static HelenusEntity entity(Class<?> iface, Metadata metadata) {

    Object dsl = dsl(iface, metadata);

    DslExportable e = (DslExportable) dsl;

    return e.getHelenusMappingEntity();
  }

  public static HelenusEntity resolve(Object ifaceOrDsl) {
    return resolve(ifaceOrDsl, metadataForEntity.get(ifaceOrDsl));
  }

  public static HelenusEntity resolve(Object ifaceOrDsl, Metadata metadata) {

    if (ifaceOrDsl == null) {
      throw new HelenusMappingException("ifaceOrDsl is null");
    }

    if (ifaceOrDsl instanceof DslExportable) {

      DslExportable e = (DslExportable) ifaceOrDsl;

      return e.getHelenusMappingEntity();
    }

    if (ifaceOrDsl instanceof Class) {

      Class<?> iface = (Class<?>) ifaceOrDsl;

      if (!iface.isInterface()) {
        throw new HelenusMappingException("class is not an interface " + iface);
      }

      if (metadata != null) {
        metadataForEntity.putIfAbsent(iface, metadata);
      }
      return entity(iface, metadata);
    }

    throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
  }
}

File diff suppressed because it is too large
@@ -16,32 +16,33 @@
package net.helenus.core;

import java.lang.annotation.Annotation;

import javax.validation.ConstraintValidator;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;

public enum HelenusValidator implements PropertyValueValidator {
  INSTANCE;

  public void validate(HelenusProperty prop, Object value) {

    for (ConstraintValidator<? extends Annotation, ?> validator : prop.getValidators()) {

      ConstraintValidator typeless = (ConstraintValidator) validator;

      boolean valid = false;

      try {
        valid = typeless.isValid(value, null);
      } catch (ClassCastException e) {
        throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
      }

      if (!valid) {
        throw new HelenusException("wrong value '" + value + "' for " + prop);
      }
    }
  }
}

@@ -19,5 +19,5 @@ import java.util.Map;

public interface MapperInstantiator {

  <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
}

@@ -15,8 +15,10 @@
 */
package net.helenus.core;

import java.util.function.Function;

import com.datastax.driver.core.Row;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;

@@ -24,203 +26,161 @@ import net.helenus.support.Fun;

public final class Mappers {

  private Mappers() {
  }

  public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1;

    public Mapper1(ColumnValueProvider provider, HelenusPropertyNode p1) {
      this.provider = provider;
      this.p1 = p1.getProperty();
    }

    @Override
    public Fun.Tuple1<A> apply(Row row) {
      return new Fun.Tuple1<A>(provider.getColumnValue(row, 0, p1));
    }
  }

  public static final class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1;
    private final HelenusProperty p2;

    public Mapper2(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
    }

    @Override
    public Fun.Tuple2<A, B> apply(Row row) {
      return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
    }
  }

  public static final class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1;
    private final HelenusProperty p2;
    private final HelenusProperty p3;

    public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
      this.p3 = p3.getProperty();
    }

    @Override
    public Fun.Tuple3<A, B, C> apply(Row row) {
      return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3));
    }
  }

  public static final class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1;
    private final HelenusProperty p2;
    private final HelenusProperty p3;
    private final HelenusProperty p4;

    public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
      this.p3 = p3.getProperty();
      this.p4 = p4.getProperty();
    }

    @Override
    public Fun.Tuple4<A, B, C, D> apply(Row row) {
      return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
    }
  }

  public static final class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5;

    public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
      this.p3 = p3.getProperty();
      this.p4 = p4.getProperty();
      this.p5 = p5.getProperty();
    }

    @Override
    public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
      return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
    }
  }

  public static final class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6;

    public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
      this.p3 = p3.getProperty();
      this.p4 = p4.getProperty();
      this.p5 = p5.getProperty();
      this.p6 = p6.getProperty();
    }

    @Override
    public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
      return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6));
    }
  }

  public static final class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

    public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
        HelenusPropertyNode p7) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
      this.p3 = p3.getProperty();
      this.p4 = p4.getProperty();
      this.p5 = p5.getProperty();
      this.p6 = p6.getProperty();
      this.p7 = p7.getProperty();
    }

    @Override
    public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
      return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
    }
  }
}

@@ -19,37 +19,37 @@ import java.util.HashMap;
import java.util.Map;

public enum Operator {
  EQ("=="),

  IN("in"),

  GT(">"),

  LT("<"),

  GTE(">="),

  LTE("<=");

  private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();

  static {
    for (Operator fo : Operator.values()) {
      indexByName.put(fo.getName(), fo);
    }
  }

  private final String name;

  private Operator(String name) {
    this.name = name;
  }

  public static Operator findByOperator(String name) {
    return indexByName.get(name);
  }

  public String getName() {
    return name;
  }
}

@@ -1,8 +1,10 @@
package net.helenus.core;

import java.util.Objects;

import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;

@@ -11,34 +13,34 @@ import net.helenus.support.HelenusMappingException;

public final class Ordered {

  private final Getter<?> getter;
  private final OrderingDirection direction;

  public Ordered(Getter<?> getter, OrderingDirection direction) {
    this.getter = getter;
    this.direction = direction;
  }

  public Ordering getOrdering() {

    Objects.requireNonNull(getter, "property is null");
    Objects.requireNonNull(direction, "direction is null");

    HelenusPropertyNode propNode = MappingUtil.resolveMappingProperty(getter);

    if (propNode.getProperty().getColumnType() != ColumnType.CLUSTERING_COLUMN) {
      throw new HelenusMappingException(
          "property must be a clustering column " + propNode.getProperty().getPropertyName());
    }

    switch (direction) {
      case ASC :
        return QueryBuilder.asc(propNode.getColumnName());

      case DESC :
        return QueryBuilder.desc(propNode.getColumnName());
    }

    throw new HelenusMappingException("invalid direction " + direction);
  }
}

@@ -1,29 +1,29 @@
package net.helenus.core;

import java.util.List;
import java.util.Objects;

public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {

  private final UnitOfWork uow;
  private final List<CommitThunk> postCommit;

  PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) {
    this.uow = uow;
    this.postCommit = postCommit;
  }

  public void andThen(CommitThunk after) {
    Objects.requireNonNull(after);
    if (postCommit == null) {
      after.apply();
    } else {
      postCommit.add(after);
    }
  }

  @Override
  public R apply(T t) {
    return null;
  }
}

@@ -17,80 +17,85 @@ package net.helenus.core;

import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;

public final class Postulate<V> {

  private final Operator operator;
  private final V[] values;

  protected Postulate(Operator op, V[] values) {
    this.operator = op;
    this.values = values;
  }

  public static <V> Postulate<V> of(Operator op, V... values) {
    return new Postulate<V>(op, values);
  }

  public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {

    switch (operator) {
      case EQ :
        return QueryBuilder.eq(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case IN :
        Object[] preparedValues = new Object[values.length];
        for (int i = 0; i != values.length; ++i) {
          preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
        }
        return QueryBuilder.in(node.getColumnName(), preparedValues);

      case LT :
        return QueryBuilder.lt(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case LTE :
        return QueryBuilder.lte(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GT :
        return QueryBuilder.gt(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GTE :
        return QueryBuilder.gte(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      default :
        throw new HelenusMappingException("unknown filter operation " + operator);
    }
  }

  public V[] values() {
    return values;
  }

  @Override
  public String toString() {

    if (operator == Operator.IN) {

      if (values == null) {
        return "in()";
      }

      int len = values.length;
      StringBuilder b = new StringBuilder();
      b.append("in(");
      for (int i = 0; i != len; i++) {
        if (b.length() > 3) {
          b.append(", ");
        }
        b.append(String.valueOf(values[i]));
      }
      return b.append(')').toString();
    }

    return operator.getName() + values[0];
  }
}

@@ -19,5 +19,5 @@ import net.helenus.mapping.HelenusProperty;

public interface PropertyValueValidator {

  void validate(HelenusProperty prop, Object value);
}

@@ -15,80 +15,83 @@
 */
package net.helenus.core;

import java.util.List;
import java.util.Map;
import java.util.Objects;

import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.mapping.OrderingDirection;

/** Sugar methods for the queries */
public final class Query {

  private Query() {
  }

  public static BindMarker marker() {
    return QueryBuilder.bindMarker();
  }

  public static BindMarker marker(String name) {
    return QueryBuilder.bindMarker(name);
  }

  public static Ordered asc(Getter<?> getter) {
    return new Ordered(getter, OrderingDirection.ASC);
  }

  public static Ordered desc(Getter<?> getter) {
    return new Ordered(getter, OrderingDirection.DESC);
  }

  public static <V> Postulate<V> eq(V val) {
    return Postulate.of(Operator.EQ, val);
  }

  public static <V> Postulate<V> lt(V val) {
    return Postulate.of(Operator.LT, val);
  }

  public static <V> Postulate<V> lte(V val) {
    return Postulate.of(Operator.LTE, val);
  }

  public static <V> Postulate<V> gt(V val) {
    return Postulate.of(Operator.GT, val);
  }

  public static <V> Postulate<V> gte(V val) {
    return Postulate.of(Operator.GTE, val);
  }

  public static <V> Postulate<V> in(V[] vals) {
    return new Postulate<V>(Operator.IN, vals);
  }

  public static <K, V> Getter<V> getIdx(Getter<List<V>> listGetter, int index) {
    Objects.requireNonNull(listGetter, "listGetter is null");

    return new Getter<V>() {

      @Override
      public V get() {
        return listGetter.get().get(index);
      }
    };
  }

  public static <K, V> Getter<V> get(Getter<Map<K, V>> mapGetter, K k) {
    Objects.requireNonNull(mapGetter, "mapGetter is null");
    Objects.requireNonNull(k, "key is null");

    return new Getter<V>() {

      @Override
      public V get() {
        return mapGetter.get().get(k);
      }
    };
  }
}

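These sugar methods pair with Filter and Ordered when building selects (a minimal sketch; the getter and the values are illustrative only, not from this commit):

    // Postulates and orderings without spelling out Operator or OrderingDirection.
    Postulate<Integer> atLeastTen = Query.gte(10);
    Ordered newestFirst = Query.desc(timeline::date);
    // Named bind marker for use in a prepared statement.
    BindMarker userIdMarker = Query.marker("user_id");
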
@@ -15,15 +15,16 @@
 */
package net.helenus.core;

import java.util.*;
import java.util.stream.Collectors;

import com.datastax.driver.core.*;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;

@ -32,417 +33,394 @@ import net.helenus.mapping.type.OptionalColumnMetadata;
|
|||
import net.helenus.support.CqlUtil;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
||||
|
||||
public final class SchemaUtil {
|
||||
|
||||
private SchemaUtil() {}
|
||||
private SchemaUtil() {
|
||||
}
|
||||
|
||||
public static RegularStatement use(String keyspace, boolean forceQuote) {
|
||||
if (forceQuote) {
|
||||
return new SimpleStatement("USE" + CqlUtil.forceQuote(keyspace));
|
||||
} else {
|
||||
return new SimpleStatement("USE " + keyspace);
|
||||
}
|
||||
}
|
||||
public static RegularStatement use(String keyspace, boolean forceQuote) {
|
||||
if (forceQuote) {
|
||||
return new SimpleStatement("USE" + CqlUtil.forceQuote(keyspace));
|
||||
} else {
|
||||
return new SimpleStatement("USE " + keyspace);
|
||||
}
|
||||
}
|
||||
|
||||
public static SchemaStatement createUserType(HelenusEntity entity) {
|
||||
public static SchemaStatement createUserType(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
}
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
}
|
||||
|
||||
CreateType create = SchemaBuilder.createType(entity.getName().toCql());
|
||||
CreateType create = SchemaBuilder.createType(entity.getName().toCql());
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
throw new HelenusMappingException(
|
||||
"primary key columns are not supported in UserDefinedType for "
|
||||
+ prop.getPropertyName()
|
||||
+ " in entity "
|
||||
+ entity);
|
||||
}
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
|
||||
+ prop.getPropertyName() + " in entity " + entity);
|
||||
}
|
||||
|
||||
try {
|
||||
prop.getDataType().addColumn(create, prop.getColumnName());
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new HelenusMappingException(
|
||||
"invalid column name '"
|
||||
+ prop.getColumnName()
|
||||
+ "' in entity '"
|
||||
+ entity.getName().getName()
|
||||
+ "'",
|
||||
e);
|
||||
}
|
||||
}
|
||||
|
||||
return create;
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterUserType(
|
||||
UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
}
|
||||
|
||||
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
|
||||
|
||||
/**
|
||||
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
|
||||
* exist
|
||||
*/
|
||||
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
String columnName = prop.getColumnName().getName();
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
continue;
|
||||
}
|
||||
|
||||
DataType dataType = userType.getFieldType(columnName);
|
||||
SchemaStatement stmt =
|
||||
prop.getDataType()
|
||||
.alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
}
|
||||
}
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
for (String field : userType.getFieldNames()) {
|
||||
if (!visitedColumns.contains(field)) {
|
||||
|
||||
result.add(alter.dropColumn(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropUserType(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
}
|
||||
|
||||
return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement dropUserType(UserType type) {
|
||||
|
||||
return SchemaBuilder.dropType(type.getTypeName()).ifExists();
|
||||
}
|
||||
|
||||
  public static SchemaStatement createMaterializedView(
      String keyspace, String viewName, HelenusEntity entity) {
    if (entity.getType() != HelenusEntityType.VIEW) {
      throw new HelenusMappingException("expected view entity " + entity);
    }

    if (entity == null) {
      throw new HelenusMappingException("no entity or table to select data");
    }

    List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> props.add(p));

    Select.Selection selection = QueryBuilder.select();

    for (HelenusPropertyNode prop : props) {
      String columnName = prop.getColumnName();
      selection = selection.column(columnName);
    }
    Class<?> iface = entity.getMappingInterface();
    String tableName = Helenus.entity(iface.getInterfaces()[0]).getName().toCql();
    Select.Where where = selection.from(tableName).where();
    List<String> p = new ArrayList<String>(props.size());
    List<String> c = new ArrayList<String>(props.size());
    List<String> o = new ArrayList<String>(props.size());

    for (HelenusPropertyNode prop : props) {
      String columnName = prop.getColumnName();
      switch (prop.getProperty().getColumnType()) {
        case PARTITION_KEY:
          p.add(columnName);
          where = where.and(new IsNotNullClause(columnName));
          break;

        case CLUSTERING_COLUMN:
          c.add(columnName);
          where = where.and(new IsNotNullClause(columnName));

          ClusteringColumn clusteringColumn =
              prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
          if (clusteringColumn != null && clusteringColumn.ordering() != null) {
            o.add(columnName + " " + clusteringColumn.ordering().cql());
          }
          break;
        default:
          break;
      }
    }

    String primaryKey =
        "PRIMARY KEY ("
            + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
            + ((c.size() > 0)
                ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
                : "")
            + ")";

    String clustering = "";
    if (o.size() > 0) {
      clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
    }
    return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering);
  }
|
||||
|
||||
public static SchemaStatement dropMaterializedView(
|
||||
String keyspace, String viewName, HelenusEntity entity) {
|
||||
return new DropMaterializedView(keyspace, viewName);
|
||||
}
|
||||
|
||||
public static SchemaStatement createTable(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
// NOTE: There is a bug in the normal path of createTable where the
|
||||
// "cache" is set too early and never unset preventing more than
|
||||
// one column on a table.
|
||||
// SchemaBuilder.createTable(entity.getName().toCql());
|
||||
CreateTable create = new CreateTable(entity.getName().toCql());
|
||||
|
||||
create.ifNotExists();
|
||||
|
||||
List<HelenusProperty> clusteringColumns = new ArrayList<HelenusProperty>();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
clusteringColumns.add(prop);
|
||||
}
|
||||
|
||||
prop.getDataType().addColumn(create, prop.getColumnName());
|
||||
}
|
||||
|
||||
if (!clusteringColumns.isEmpty()) {
|
||||
Options options = create.withOptions();
|
||||
clusteringColumns.forEach(
|
||||
p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
|
||||
}
|
||||
|
||||
return create;
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterTable(
|
||||
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
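    // Reconciles the mapped entity with the live table: partition and clustering columns are
    // never altered, other columns produce an ALTER only when needed, and columns without a
    // mapped property are dropped only when dropUnusedColumns is set.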
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
|
||||
|
||||
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
String columnName = prop.getColumnName().getName();
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ColumnMetadata columnMetadata = tmd.getColumn(columnName);
|
||||
SchemaStatement stmt =
|
||||
prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
}
|
||||
}
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
for (ColumnMetadata cm : tmd.getColumns()) {
|
||||
if (!visitedColumns.contains(cm.getName())) {
|
||||
|
||||
result.add(alter.dropColumn(cm.getName()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropTable(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement createIndex(HelenusProperty prop) {
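    // Case-sensitive indexes become ordinary secondary indexes; otherwise a SASI index is
    // created, which supports case-insensitive matching.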
|
||||
if (prop.caseSensitiveIndex()) {
|
||||
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
|
||||
.ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql())
|
||||
.andColumn(prop.getColumnName().toCql());
|
||||
} else {
|
||||
return new CreateSasiIndex(prop.getIndexName().get().toCql())
|
||||
.ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql())
|
||||
.andColumn(prop.getColumnName().toCql());
|
||||
}
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
|
||||
|
||||
return entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.filter(p -> p.getIndexName().isPresent())
|
||||
.map(p -> SchemaUtil.createIndex(p))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterIndexes(
|
||||
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {
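    // Adds an index for every indexed property missing from the table and, when
    // dropUnusedIndexes is set, drops table indexes that no longer back a mapped property.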
|
||||
|
||||
List<SchemaStatement> list = new ArrayList<SchemaStatement>();
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.filter(p -> p.getIndexName().isPresent())
|
||||
.forEach(
|
||||
p -> {
|
||||
String columnName = p.getColumnName().getName();
|
||||
|
||||
if (dropUnusedIndexes) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnMetadata cm = tmd.getColumn(columnName);
|
||||
|
||||
if (cm != null) {
|
||||
IndexMetadata im = tmd.getIndex(columnName);
|
||||
if (im == null) {
|
||||
list.add(createIndex(p));
|
||||
}
|
||||
} else {
|
||||
list.add(createIndex(p));
|
||||
}
|
||||
});
|
||||
|
||||
if (dropUnusedIndexes) {
|
||||
|
||||
tmd.getColumns()
|
||||
.stream()
|
||||
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
|
||||
.forEach(
|
||||
c -> {
|
||||
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
|
||||
});
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropIndex(HelenusProperty prop) {
|
||||
return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
|
||||
}
|
||||
|
||||
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
|
||||
switch (o) {
|
||||
case ASC:
|
||||
return SchemaBuilder.Direction.ASC;
|
||||
case DESC:
|
||||
return SchemaBuilder.Direction.DESC;
|
||||
}
|
||||
throw new HelenusMappingException("unknown ordering " + o);
|
||||
}
|
||||
|
||||
public static void throwNoMapping(HelenusProperty prop) {
|
||||
|
||||
throw new HelenusMappingException(
|
||||
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
|
||||
+ prop.getPropertyName()
|
||||
+ "' type is '"
|
||||
+ prop.getJavaType()
|
||||
+ "' in the entity "
|
||||
+ prop.getEntity());
|
||||
}
|
||||
|
||||
private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
|
||||
if (columnMetadata != null) {
|
||||
return new OptionalColumnMetadata() {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return columnMetadata.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType getType() {
|
||||
return columnMetadata.getType();
|
||||
}
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
|
||||
if (dataType != null) {
|
||||
return new OptionalColumnMetadata() {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType getType() {
|
||||
return dataType;
|
||||
}
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
			try {
				prop.getDataType().addColumn(create, prop.getColumnName());
			} catch (IllegalArgumentException e) {
				throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
						+ entity.getName().getName() + "'", e);
			}
		}

		return create;
	}

	public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
			boolean dropUnusedColumns) {

		if (entity.getType() != HelenusEntityType.UDT) {
			throw new HelenusMappingException("expected UDT entity " + entity);
		}

		List<SchemaStatement> result = new ArrayList<SchemaStatement>();

		/**
		 * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
		 * when it will exist
		 */
		Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

		final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
String columnName = prop.getColumnName().getName();
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
continue;
|
||||
}
|
||||
|
||||
DataType dataType = userType.getFieldType(columnName);
|
||||
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
|
||||
optional(columnName, dataType));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
}
|
||||
}
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
for (String field : userType.getFieldNames()) {
|
||||
if (!visitedColumns.contains(field)) {
|
||||
|
||||
result.add(alter.dropColumn(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropUserType(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
}
|
||||
|
||||
return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement dropUserType(UserType type) {
|
||||
|
||||
return SchemaBuilder.dropType(type.getTypeName()).ifExists();
|
||||
}
|
||||
|
||||
public static String createPrimaryKeyPhrase(Collection<HelenusProperty> properties) {
|
||||
List<String> p = new ArrayList<String>(properties.size());
|
||||
List<String> c = new ArrayList<String>(properties.size());
|
||||
|
||||
for (HelenusProperty prop : properties) {
|
||||
String columnName = prop.getColumnName().toCql();
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY :
|
||||
p.add(columnName);
|
||||
break;
|
||||
case CLUSTERING_COLUMN :
|
||||
c.add(columnName);
|
||||
break;
|
||||
default :
|
||||
break;
|
||||
}
|
||||
}
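		// Illustrative example (hypothetical columns): partition keys [id, bucket] plus a
		// clustering column [ts] produce "((id, bucket), ts)".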
|
||||
|
||||
return "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
|
||||
+ ((c.size() > 0) ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0)) : "") + ")";
|
||||
}
|
||||
|
||||
public static SchemaStatement createMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
|
||||
if (entity.getType() != HelenusEntityType.VIEW) {
|
||||
throw new HelenusMappingException("expected view entity " + entity);
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("no entity or table to select data");
|
||||
}
|
||||
|
||||
List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> props.add(p));
|
||||
|
||||
Select.Selection selection = QueryBuilder.select();
|
||||
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
String columnName = prop.getColumnName();
|
||||
selection = selection.column(columnName);
|
||||
}
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
String tableName = Helenus.entity(iface.getInterfaces()[0]).getName().toCql();
|
||||
Select.Where where = selection.from(tableName).where();
|
||||
List<String> o = new ArrayList<String>(props.size());
|
||||
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
String columnName = prop.getColumnName();
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY :
|
||||
where = where.and(new IsNotNullClause(columnName));
|
||||
break;
|
||||
|
||||
case CLUSTERING_COLUMN :
|
||||
where = where.and(new IsNotNullClause(columnName));
|
||||
|
||||
ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod()
|
||||
.getAnnotation(ClusteringColumn.class);
|
||||
if (clusteringColumn != null && clusteringColumn.ordering() != null) {
|
||||
o.add(columnName + " " + clusteringColumn.ordering().cql());
|
||||
}
|
||||
break;
|
||||
default :
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
String primaryKey = "PRIMARY KEY " + createPrimaryKeyPhrase(entity.getOrderedProperties());
|
||||
|
||||
String clustering = "";
|
||||
if (o.size() > 0) {
|
||||
clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
|
||||
}
|
||||
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering).ifNotExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement dropMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
|
||||
return new DropMaterializedView(keyspace, viewName);
|
||||
}
|
||||
|
||||
public static SchemaStatement createTable(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
// NOTE: There is a bug in the normal path of createTable where the
|
||||
// "cache" is set too early and never unset preventing more than
|
||||
// one column on a table.
|
||||
// SchemaBuilder.createTable(entity.getName().toCql());
|
||||
CreateTable create = new CreateTable(entity.getName().toCql());
|
||||
|
||||
create.ifNotExists();
|
||||
|
||||
List<HelenusProperty> clusteringColumns = new ArrayList<HelenusProperty>();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
clusteringColumns.add(prop);
|
||||
}
|
||||
|
||||
prop.getDataType().addColumn(create, prop.getColumnName());
|
||||
}
|
||||
|
||||
if (!clusteringColumns.isEmpty()) {
|
||||
Options options = create.withOptions();
|
||||
clusteringColumns
|
||||
.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
|
||||
}
|
||||
|
||||
return create;
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
|
||||
|
||||
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
|
||||
|
||||
final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
String columnName = prop.getColumnName().getName();
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ColumnMetadata columnMetadata = tmd.getColumn(columnName);
|
||||
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
|
||||
optional(columnMetadata));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
}
|
||||
}
|
||||
|
||||
if (dropUnusedColumns) {
|
||||
for (ColumnMetadata cm : tmd.getColumns()) {
|
||||
if (!visitedColumns.contains(cm.getName())) {
|
||||
|
||||
result.add(alter.dropColumn(cm.getName()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropTable(HelenusEntity entity) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
}
|
||||
|
||||
return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement createIndex(HelenusProperty prop) {
|
||||
if (prop.caseSensitiveIndex()) {
|
||||
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()).ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
|
||||
} else {
|
||||
return new CreateSasiIndex(prop.getIndexName().get().toCql()).ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
|
||||
}
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
|
||||
|
||||
return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
|
||||
.map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
|
||||
boolean dropUnusedIndexes) {
|
||||
|
||||
List<SchemaStatement> list = new ArrayList<SchemaStatement>();
|
||||
|
||||
final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {
|
||||
String columnName = p.getColumnName().getName();
|
||||
|
||||
if (dropUnusedIndexes) {
|
||||
visitedColumns.add(columnName);
|
||||
}
|
||||
|
||||
ColumnMetadata cm = tmd.getColumn(columnName);
|
||||
|
||||
if (cm != null) {
|
||||
IndexMetadata im = tmd.getIndex(columnName);
|
||||
if (im == null) {
|
||||
list.add(createIndex(p));
|
||||
}
|
||||
} else {
|
||||
list.add(createIndex(p));
|
||||
}
|
||||
});
|
||||
|
||||
if (dropUnusedIndexes) {
|
||||
|
||||
tmd.getColumns().stream()
|
||||
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
|
||||
.forEach(c -> {
|
||||
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
|
||||
});
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
public static SchemaStatement dropIndex(HelenusProperty prop) {
|
||||
return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
|
||||
}
|
||||
|
||||
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
|
||||
switch (o) {
|
||||
case ASC :
|
||||
return SchemaBuilder.Direction.ASC;
|
||||
case DESC :
|
||||
return SchemaBuilder.Direction.DESC;
|
||||
}
|
||||
throw new HelenusMappingException("unknown ordering " + o);
|
||||
}
|
||||
|
||||
public static void throwNoMapping(HelenusProperty prop) {
|
||||
|
||||
throw new HelenusMappingException(
|
||||
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
|
||||
+ prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
|
||||
+ prop.getEntity());
|
||||
}
|
||||
|
||||
private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
|
||||
if (columnMetadata != null) {
|
||||
return new OptionalColumnMetadata() {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return columnMetadata.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType getType() {
|
||||
return columnMetadata.getType();
|
||||
}
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
|
||||
if (dataType != null) {
|
||||
return new OptionalColumnMetadata() {
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType getType() {
|
||||
return dataType;
|
||||
}
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,16 +15,18 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import brave.Tracer;
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
|
||||
import brave.Tracer;
|
||||
import net.helenus.core.reflect.DslExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusEntityType;
|
||||
|
@@ -37,399 +39,344 @@ import net.helenus.support.PackageUtil;
|
|||
|
||||
public final class SessionInitializer extends AbstractSessionOperations {
|
||||
|
||||
private final Session session;
|
||||
private CodecRegistry registry;
|
||||
private String usingKeyspace;
|
||||
private boolean showCql = false;
|
||||
private ConsistencyLevel consistencyLevel;
|
||||
private boolean idempotent = true;
|
||||
private MetricRegistry metricRegistry = new MetricRegistry();
|
||||
private Tracer zipkinTracer;
|
||||
private PrintStream printStream = System.out;
|
||||
private Executor executor = MoreExecutors.directExecutor();
|
||||
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
|
||||
|
||||
private SessionRepositoryBuilder sessionRepository;
|
||||
|
||||
private boolean dropUnusedColumns = false;
|
||||
private boolean dropUnusedIndexes = false;
|
||||
|
||||
private KeyspaceMetadata keyspaceMetadata;
|
||||
|
||||
private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
|
||||
private AutoDdl autoDdl = AutoDdl.UPDATE;
|
||||
|
||||
SessionInitializer(Session session) {
|
||||
this.session = Objects.requireNonNull(session, "empty session");
|
||||
this.usingKeyspace = session.getLoggedKeyspace(); // can be null
|
||||
this.sessionRepository = new SessionRepositoryBuilder(session);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Session currentSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String usingKeyspace() {
|
||||
return usingKeyspace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Executor getExecutor() {
|
||||
return executor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SessionRepository getSessionRepository() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ColumnValueProvider getValueProvider() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ColumnValuePreparer getValuePreparer() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
public SessionInitializer showCql() {
|
||||
this.showCql = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer showCql(boolean enabled) {
|
||||
this.showCql = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
|
||||
this.metricRegistry = metricRegistry;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer zipkinTracer(Tracer tracer) {
|
||||
this.zipkinTracer = tracer;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
|
||||
this.unitOfWorkClass = e;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
|
||||
this.consistencyLevel = consistencyLevel;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ConsistencyLevel getDefaultConsistencyLevel() {
|
||||
return consistencyLevel;
|
||||
}
|
||||
|
||||
public SessionInitializer idempotentQueryExecution(boolean idempotent) {
|
||||
this.idempotent = idempotent;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean getDefaultQueryIdempotency() {
|
||||
return idempotent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PrintStream getPrintStream() {
|
||||
return printStream;
|
||||
}
|
||||
|
||||
public SessionInitializer printTo(PrintStream out) {
|
||||
this.printStream = out;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withExecutor(Executor executor) {
|
||||
Objects.requireNonNull(executor, "empty executor");
|
||||
this.executor = executor;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withCachingExecutor() {
|
||||
this.executor = Executors.newCachedThreadPool();
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer dropUnusedColumns(boolean enabled) {
|
||||
this.dropUnusedColumns = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer dropUnusedIndexes(boolean enabled) {
|
||||
this.dropUnusedIndexes = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withCodecRegistry(CodecRegistry registry) {
|
||||
this.registry = registry;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isShowCql() {
|
||||
return showCql;
|
||||
}
|
||||
|
||||
public SessionInitializer addPackage(String packageName) {
|
||||
try {
|
||||
PackageUtil.getClasses(packageName)
|
||||
.stream()
|
||||
.filter(c -> c.isInterface() && !c.isAnnotation())
|
||||
.forEach(
|
||||
clazz -> {
|
||||
initList.add(Either.right(clazz));
|
||||
});
|
||||
} catch (IOException | ClassNotFoundException e) {
|
||||
throw new HelenusException("fail to add package " + packageName, e);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer add(Object... dsls) {
|
||||
Objects.requireNonNull(dsls, "dsls is empty");
|
||||
int len = dsls.length;
|
||||
for (int i = 0; i != len; ++i) {
|
||||
Object obj = Objects.requireNonNull(dsls[i], "element " + i + " is empty");
|
||||
initList.add(Either.left(obj));
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoValidate() {
|
||||
this.autoDdl = AutoDdl.VALIDATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoUpdate() {
|
||||
this.autoDdl = AutoDdl.UPDATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoCreate() {
|
||||
this.autoDdl = AutoDdl.CREATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoCreateDrop() {
|
||||
this.autoDdl = AutoDdl.CREATE_DROP;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer auto(AutoDdl autoDdl) {
|
||||
this.autoDdl = autoDdl;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer use(String keyspace) {
|
||||
session.execute(SchemaUtil.use(keyspace, false));
|
||||
this.usingKeyspace = keyspace;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer use(String keyspace, boolean forceQuote) {
|
||||
session.execute(SchemaUtil.use(keyspace, forceQuote));
|
||||
this.usingKeyspace = keyspace;
|
||||
return this;
|
||||
}
|
||||
|
||||
public void singleton() {
|
||||
Helenus.setSession(get());
|
||||
}
|
||||
|
||||
public synchronized HelenusSession get() {
|
||||
initialize();
|
||||
return new HelenusSession(
|
||||
session,
|
||||
usingKeyspace,
|
||||
registry,
|
||||
showCql,
|
||||
printStream,
|
||||
sessionRepository,
|
||||
executor,
|
||||
autoDdl == AutoDdl.CREATE_DROP,
|
||||
consistencyLevel,
|
||||
idempotent,
|
||||
unitOfWorkClass,
|
||||
metricRegistry,
|
||||
zipkinTracer);
|
||||
}
|
||||
|
||||
private void initialize() {
|
||||
|
||||
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
|
||||
|
||||
initList.forEach(
|
||||
(either) -> {
|
||||
Class<?> iface = null;
|
||||
if (either.isLeft()) {
|
||||
iface = MappingUtil.getMappingInterface(either.getLeft());
|
||||
} else {
|
||||
iface = either.getRight();
|
||||
}
|
||||
|
||||
DslExportable dsl = (DslExportable) Helenus.dsl(iface);
|
||||
dsl.setCassandraMetadataForHelenusSesion(session.getCluster().getMetadata());
|
||||
sessionRepository.add(dsl);
|
||||
});
|
||||
|
||||
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
|
||||
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
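      // DDL is applied according to the configured mode: CREATE_DROP tears down views, then
      // tables, then UDTs before falling through to CREATE; VALIDATE only reports drift;
      // UPDATE issues the statements needed to converge the live schema.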
|
||||
|
||||
switch (autoDdl) {
|
||||
case CREATE_DROP:
|
||||
|
||||
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still referenced
|
||||
// by a view.
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as the type is
|
||||
// still referenced by a table.
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.dropTable(e));
|
||||
|
||||
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
|
||||
|
||||
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is intentional!)
|
||||
case CREATE:
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.createTable(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
|
||||
break;
|
||||
|
||||
case VALIDATE:
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
|
||||
|
||||
break;
|
||||
|
||||
case UPDATE:
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
break;
|
||||
}
|
||||
|
||||
KeyspaceMetadata km = getKeyspaceMetadata();
|
||||
|
||||
for (UserType userType : km.getUserTypes()) {
|
||||
sessionRepository.addUserType(userType.getTypeName(), userType);
|
||||
}
|
||||
}
|
||||
|
||||
private void eachUserTypeInOrder(
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
|
||||
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
|
||||
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.UDT)
|
||||
.forEach(
|
||||
e -> {
|
||||
stack.clear();
|
||||
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInReverseOrder(
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
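    // User types must be dropped in the reverse of their creation order, so the in-order
    // traversal is captured in a deque (addFirst) and then replayed from the front.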
|
||||
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
|
||||
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
|
||||
deque
|
||||
.stream()
|
||||
.forEach(
|
||||
e -> {
|
||||
action.accept(e);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInRecursion(
|
||||
HelenusEntity e,
|
||||
Set<HelenusEntity> processedSet,
|
||||
Set<HelenusEntity> stack,
|
||||
UserTypeOperations userTypeOps,
|
||||
Consumer<? super HelenusEntity> action) {
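      // Depth-first walk over UDT dependencies: types used by this entity are processed
      // first; `stack` guards against reference cycles and `processedSet` prevents a type
      // from being emitted twice.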
|
||||
|
||||
stack.add(e);
|
||||
|
||||
Collection<HelenusEntity> createBefore = sessionRepository.getUserTypeUses(e);
|
||||
|
||||
for (HelenusEntity be : createBefore) {
|
||||
if (!processedSet.contains(be) && !stack.contains(be)) {
|
||||
eachUserTypeInRecursion(be, processedSet, stack, userTypeOps, action);
|
||||
processedSet.add(be);
|
||||
}
|
||||
}
|
||||
|
||||
if (!processedSet.contains(e)) {
|
||||
action.accept(e);
|
||||
processedSet.add(e);
|
||||
}
|
||||
}
|
||||
|
||||
private KeyspaceMetadata getKeyspaceMetadata() {
|
||||
if (keyspaceMetadata == null) {
|
||||
keyspaceMetadata =
|
||||
session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
|
||||
}
|
||||
return keyspaceMetadata;
|
||||
}
|
||||
|
||||
private TableMetadata getTableMetadata(HelenusEntity entity) {
|
||||
return getKeyspaceMetadata().getTable(entity.getName().getName());
|
||||
}
|
||||
|
||||
private UserType getUserType(HelenusEntity entity) {
|
||||
return getKeyspaceMetadata().getUserType(entity.getName().getName());
|
||||
}
|
||||
private final Session session;
|
||||
private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
|
||||
private CodecRegistry registry;
|
||||
private String usingKeyspace;
|
||||
private boolean showCql = false;
|
||||
private ConsistencyLevel consistencyLevel;
|
||||
private boolean idempotent = true;
|
||||
private MetricRegistry metricRegistry = new MetricRegistry();
|
||||
private Tracer zipkinTracer;
|
||||
private PrintStream printStream = System.out;
|
||||
private Executor executor = MoreExecutors.directExecutor();
|
||||
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
|
||||
private SessionRepositoryBuilder sessionRepository;
|
||||
private boolean dropUnusedColumns = false;
|
||||
private boolean dropUnusedIndexes = false;
|
||||
private KeyspaceMetadata keyspaceMetadata;
|
||||
private AutoDdl autoDdl = AutoDdl.UPDATE;
|
||||
|
||||
SessionInitializer(Session session) {
|
||||
this.session = Objects.requireNonNull(session, "empty session");
|
||||
this.usingKeyspace = session.getLoggedKeyspace(); // can be null
|
||||
this.sessionRepository = new SessionRepositoryBuilder(session);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Session currentSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String usingKeyspace() {
|
||||
return usingKeyspace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Executor getExecutor() {
|
||||
return executor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SessionRepository getSessionRepository() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ColumnValueProvider getValueProvider() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ColumnValuePreparer getValuePreparer() {
|
||||
throw new HelenusException("not expected to call");
|
||||
}
|
||||
|
||||
public SessionInitializer showCql() {
|
||||
this.showCql = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer showCql(boolean enabled) {
|
||||
this.showCql = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
|
||||
this.metricRegistry = metricRegistry;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer zipkinTracer(Tracer tracer) {
|
||||
this.zipkinTracer = tracer;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
|
||||
this.unitOfWorkClass = e;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
|
||||
this.consistencyLevel = consistencyLevel;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ConsistencyLevel getDefaultConsistencyLevel() {
|
||||
return consistencyLevel;
|
||||
}
|
||||
|
||||
public SessionInitializer idempotentQueryExecution(boolean idempotent) {
|
||||
this.idempotent = idempotent;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean getDefaultQueryIdempotency() {
|
||||
return idempotent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PrintStream getPrintStream() {
|
||||
return printStream;
|
||||
}
|
||||
|
||||
public SessionInitializer printTo(PrintStream out) {
|
||||
this.printStream = out;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withExecutor(Executor executor) {
|
||||
Objects.requireNonNull(executor, "empty executor");
|
||||
this.executor = executor;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withCachingExecutor() {
|
||||
this.executor = Executors.newCachedThreadPool();
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer dropUnusedColumns(boolean enabled) {
|
||||
this.dropUnusedColumns = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer dropUnusedIndexes(boolean enabled) {
|
||||
this.dropUnusedIndexes = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer withCodecRegistry(CodecRegistry registry) {
|
||||
this.registry = registry;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isShowCql() {
|
||||
return showCql;
|
||||
}
|
||||
|
||||
public SessionInitializer addPackage(String packageName) {
|
||||
try {
|
||||
PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
|
||||
.forEach(clazz -> {
|
||||
initList.add(Either.right(clazz));
|
||||
});
|
||||
} catch (IOException | ClassNotFoundException e) {
|
||||
throw new HelenusException("fail to add package " + packageName, e);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer add(Object... dsls) {
|
||||
Objects.requireNonNull(dsls, "dsls is empty");
|
||||
int len = dsls.length;
|
||||
for (int i = 0; i != len; ++i) {
|
||||
Object obj = Objects.requireNonNull(dsls[i], "element " + i + " is empty");
|
||||
initList.add(Either.left(obj));
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoValidate() {
|
||||
this.autoDdl = AutoDdl.VALIDATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoUpdate() {
|
||||
this.autoDdl = AutoDdl.UPDATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoCreate() {
|
||||
this.autoDdl = AutoDdl.CREATE;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer autoCreateDrop() {
|
||||
this.autoDdl = AutoDdl.CREATE_DROP;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer auto(AutoDdl autoDdl) {
|
||||
this.autoDdl = autoDdl;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer use(String keyspace) {
|
||||
session.execute(SchemaUtil.use(keyspace, false));
|
||||
this.usingKeyspace = keyspace;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer use(String keyspace, boolean forceQuote) {
|
||||
session.execute(SchemaUtil.use(keyspace, forceQuote));
|
||||
this.usingKeyspace = keyspace;
|
||||
return this;
|
||||
}
|
||||
|
||||
public void singleton() {
|
||||
Helenus.setSession(get());
|
||||
}
|
||||
|
||||
public synchronized HelenusSession get() {
|
||||
initialize();
|
||||
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
|
||||
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
|
||||
zipkinTracer);
|
||||
}
|
||||
|
||||
private void initialize() {
|
||||
|
||||
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
|
||||
|
||||
initList.forEach((either) -> {
|
||||
Class<?> iface = null;
|
||||
if (either.isLeft()) {
|
||||
iface = MappingUtil.getMappingInterface(either.getLeft());
|
||||
} else {
|
||||
iface = either.getRight();
|
||||
}
|
||||
|
||||
DslExportable dsl = (DslExportable) Helenus.dsl(iface);
|
||||
dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
|
||||
sessionRepository.add(dsl);
|
||||
});
|
||||
|
||||
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
|
||||
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
|
||||
|
||||
switch (autoDdl) {
|
||||
case CREATE_DROP :
|
||||
|
||||
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
|
||||
// referenced
|
||||
// by a view.
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
|
||||
// the type is
|
||||
// still referenced by a table.
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.dropTable(e));
|
||||
|
||||
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
|
||||
|
||||
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
|
||||
// intentional!)
|
||||
case CREATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.createTable(e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
|
||||
break;
|
||||
|
||||
case VALIDATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
|
||||
|
||||
break;
|
||||
|
||||
case UPDATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
break;
|
||||
}
|
||||
|
||||
KeyspaceMetadata km = getKeyspaceMetadata();
|
||||
|
||||
for (UserType userType : km.getUserTypes()) {
|
||||
sessionRepository.addUserType(userType.getTypeName(), userType);
|
||||
}
|
||||
}
|
||||
|
||||
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
|
||||
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
|
||||
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
|
||||
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
|
||||
stack.clear();
|
||||
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
|
||||
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
|
||||
deque.stream().forEach(e -> {
|
||||
action.accept(e);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
|
||||
stack.add(e);
|
||||
|
||||
Collection<HelenusEntity> createBefore = sessionRepository.getUserTypeUses(e);
|
||||
|
||||
for (HelenusEntity be : createBefore) {
|
||||
if (!processedSet.contains(be) && !stack.contains(be)) {
|
||||
eachUserTypeInRecursion(be, processedSet, stack, userTypeOps, action);
|
||||
processedSet.add(be);
|
||||
}
|
||||
}
|
||||
|
||||
if (!processedSet.contains(e)) {
|
||||
action.accept(e);
|
||||
processedSet.add(e);
|
||||
}
|
||||
}
|
||||
|
||||
private KeyspaceMetadata getKeyspaceMetadata() {
|
||||
if (keyspaceMetadata == null) {
|
||||
keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
|
||||
}
|
||||
return keyspaceMetadata;
|
||||
}
|
||||
|
||||
private TableMetadata getTableMetadata(HelenusEntity entity) {
|
||||
return getKeyspaceMetadata().getTable(entity.getName().getName());
|
||||
}
|
||||
|
||||
private UserType getUserType(HelenusEntity entity) {
|
||||
return getKeyspaceMetadata().getUserType(entity.getName().getName());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,30 +15,31 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import com.datastax.driver.core.UserType;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import java.util.Collection;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
|
||||
public final class SessionRepository {
|
||||
|
||||
private final ImmutableMap<String, UserType> userTypeMap;
|
||||
private final ImmutableMap<String, UserType> userTypeMap;
|
||||
|
||||
private final ImmutableMap<Class<?>, HelenusEntity> entityMap;
|
||||
private final ImmutableMap<Class<?>, HelenusEntity> entityMap;
|
||||
|
||||
public SessionRepository(SessionRepositoryBuilder builder) {
|
||||
public SessionRepository(SessionRepositoryBuilder builder) {
|
||||
|
||||
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
|
||||
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
|
||||
|
||||
entityMap =
|
||||
ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
|
||||
}
|
||||
entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
|
||||
}
|
||||
|
||||
public UserType findUserType(String name) {
|
||||
return userTypeMap.get(name.toLowerCase());
|
||||
}
|
||||
public UserType findUserType(String name) {
|
||||
return userTypeMap.get(name.toLowerCase());
|
||||
}
|
||||
|
||||
public Collection<HelenusEntity> entities() {
|
||||
return entityMap.values();
|
||||
}
|
||||
public Collection<HelenusEntity> entities() {
|
||||
return entityMap.values();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,15 +15,17 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.Session;
|
||||
import com.datastax.driver.core.UDTValue;
|
||||
import com.datastax.driver.core.UserType;
|
||||
import com.google.common.collect.HashMultimap;
|
||||
import com.google.common.collect.Multimap;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusEntityType;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
@@ -33,112 +35,110 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class SessionRepositoryBuilder {
|
||||
|
||||
private static final Optional<HelenusEntityType> OPTIONAL_UDT =
|
||||
Optional.of(HelenusEntityType.UDT);
|
||||
private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);
|
||||
|
||||
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
|
||||
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
|
||||
|
||||
private final Map<String, UserType> userTypeMap = new HashMap<String, UserType>();
|
||||
private final Map<String, UserType> userTypeMap = new HashMap<String, UserType>();
|
||||
|
||||
private final Multimap<HelenusEntity, HelenusEntity> userTypeUsesMap = HashMultimap.create();
|
||||
private final Multimap<HelenusEntity, HelenusEntity> userTypeUsesMap = HashMultimap.create();
|
||||
|
||||
private final Session session;
|
||||
private final Session session;
|
||||
|
||||
SessionRepositoryBuilder(Session session) {
|
||||
this.session = session;
|
||||
}
|
||||
SessionRepositoryBuilder(Session session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
public SessionRepository build() {
|
||||
return new SessionRepository(this);
|
||||
}
|
||||
public SessionRepository build() {
|
||||
return new SessionRepository(this);
|
||||
}
|
||||
|
||||
public Collection<HelenusEntity> getUserTypeUses(HelenusEntity udtName) {
|
||||
return userTypeUsesMap.get(udtName);
|
||||
}
|
||||
public Collection<HelenusEntity> getUserTypeUses(HelenusEntity udtName) {
|
||||
return userTypeUsesMap.get(udtName);
|
||||
}
|
||||
|
||||
public Collection<HelenusEntity> entities() {
|
||||
return entityMap.values();
|
||||
}
|
||||
public Collection<HelenusEntity> entities() {
|
||||
return entityMap.values();
|
||||
}
|
||||
|
||||
protected Map<Class<?>, HelenusEntity> getEntityMap() {
|
||||
return entityMap;
|
||||
}
|
||||
protected Map<Class<?>, HelenusEntity> getEntityMap() {
|
||||
return entityMap;
|
||||
}
|
||||
|
||||
protected Map<String, UserType> getUserTypeMap() {
|
||||
return userTypeMap;
|
||||
}
|
||||
protected Map<String, UserType> getUserTypeMap() {
|
||||
return userTypeMap;
|
||||
}
|
||||
|
||||
public void addUserType(String name, UserType userType) {
|
||||
userTypeMap.putIfAbsent(name.toLowerCase(), userType);
|
||||
}
|
||||
public void addUserType(String name, UserType userType) {
|
||||
userTypeMap.putIfAbsent(name.toLowerCase(), userType);
|
||||
}
|
||||
|
||||
public HelenusEntity add(Object dsl) {
|
||||
return add(dsl, Optional.empty());
|
||||
}
|
||||
public HelenusEntity add(Object dsl) {
|
||||
return add(dsl, Optional.empty());
|
||||
}
|
||||
|
||||
public void addEntity(HelenusEntity entity) {
|
||||
public void addEntity(HelenusEntity entity) {
|
||||
|
||||
HelenusEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
|
||||
HelenusEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
|
||||
|
||||
if (concurrentEntity == null) {
|
||||
addUserDefinedTypes(entity.getOrderedProperties());
|
||||
}
|
||||
}
|
||||
if (concurrentEntity == null) {
|
||||
addUserDefinedTypes(entity.getOrderedProperties());
|
||||
}
|
||||
}
|
||||
|
||||
public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
|
||||
public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
|
||||
|
||||
HelenusEntity helenusEntity = Helenus.resolve(dsl, session.getCluster().getMetadata());
|
||||
HelenusEntity helenusEntity = Helenus.resolve(dsl, session.getCluster().getMetadata());
|
||||
|
||||
Class<?> iface = helenusEntity.getMappingInterface();
|
||||
Class<?> iface = helenusEntity.getMappingInterface();
|
||||
|
||||
HelenusEntity entity = entityMap.get(iface);
|
||||
HelenusEntity entity = entityMap.get(iface);
|
||||
|
||||
if (entity == null) {
|
||||
if (entity == null) {
|
||||
|
||||
entity = helenusEntity;
|
||||
entity = helenusEntity;
|
||||
|
||||
if (type.isPresent() && entity.getType() != type.get()) {
|
||||
throw new HelenusMappingException(
|
||||
"unexpected entity type " + entity.getType() + " for " + entity);
|
||||
}
|
||||
if (type.isPresent() && entity.getType() != type.get()) {
|
||||
throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
|
||||
}
|
||||
|
||||
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
|
||||
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
|
||||
|
||||
if (concurrentEntity == null) {
|
||||
addUserDefinedTypes(entity.getOrderedProperties());
|
||||
} else {
|
||||
entity = concurrentEntity;
|
||||
}
|
||||
}
|
||||
if (concurrentEntity == null) {
|
||||
addUserDefinedTypes(entity.getOrderedProperties());
|
||||
} else {
|
||||
entity = concurrentEntity;
|
||||
}
|
||||
}
|
||||
|
||||
return entity;
|
||||
}
|
||||
return entity;
|
||||
}
|
||||
|
||||
private void addUserDefinedTypes(Collection<HelenusProperty> props) {
|
||||
private void addUserDefinedTypes(Collection<HelenusProperty> props) {
|
||||
|
||||
for (HelenusProperty prop : props) {
|
||||
for (HelenusProperty prop : props) {
|
||||
|
||||
AbstractDataType type = prop.getDataType();
|
||||
AbstractDataType type = prop.getDataType();
|
||||
|
||||
if (type instanceof DTDataType) {
|
||||
continue;
|
||||
}
|
||||
if (type instanceof DTDataType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!UDTValue.class.isAssignableFrom(prop.getJavaType())) {
|
||||
if (!UDTValue.class.isAssignableFrom(prop.getJavaType())) {
|
||||
|
||||
for (Class<?> udtClass : type.getTypeArguments()) {
|
||||
for (Class<?> udtClass : type.getTypeArguments()) {
|
||||
|
||||
if (UDTValue.class.isAssignableFrom(udtClass)) {
|
||||
continue;
|
||||
}
|
||||
if (UDTValue.class.isAssignableFrom(udtClass)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
HelenusEntity addedUserType = add(udtClass, OPTIONAL_UDT);
|
||||
HelenusEntity addedUserType = add(udtClass, OPTIONAL_UDT);
|
||||
|
||||
if (HelenusEntityType.UDT == prop.getEntity().getType()) {
|
||||
userTypeUsesMap.put(prop.getEntity(), addedUserType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (HelenusEntityType.UDT == prop.getEntity().getType()) {
|
||||
userTypeUsesMap.put(prop.getEntity(), addedUserType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,97 +15,88 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.datastax.driver.core.TableMetadata;
|
||||
import com.datastax.driver.core.schemabuilder.SchemaStatement;
|
||||
import java.util.List;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
public final class TableOperations {
|
||||
|
||||
private final AbstractSessionOperations sessionOps;
|
||||
private final boolean dropUnusedColumns;
|
||||
private final boolean dropUnusedIndexes;
|
||||
private final AbstractSessionOperations sessionOps;
|
||||
private final boolean dropUnusedColumns;
|
||||
private final boolean dropUnusedIndexes;
|
||||
|
||||
public TableOperations(
|
||||
AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
|
||||
this.sessionOps = sessionOps;
|
||||
this.dropUnusedColumns = dropUnusedColumns;
|
||||
this.dropUnusedIndexes = dropUnusedIndexes;
|
||||
}
|
||||
public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
|
||||
this.sessionOps = sessionOps;
|
||||
this.dropUnusedColumns = dropUnusedColumns;
|
||||
this.dropUnusedIndexes = dropUnusedIndexes;
|
||||
}
|
||||
|
||||
public void createTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.createTable(entity), true);
|
||||
executeBatch(SchemaUtil.createIndexes(entity));
|
||||
}
|
||||
public void createTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.createTable(entity), true);
|
||||
executeBatch(SchemaUtil.createIndexes(entity));
|
||||
}
|
||||
|
||||
public void dropTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.dropTable(entity), true);
|
||||
}
|
||||
public void dropTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.dropTable(entity), true);
|
||||
}
|
||||
|
||||
public void validateTable(TableMetadata tmd, HelenusEntity entity) {
|
||||
public void validateTable(TableMetadata tmd, HelenusEntity entity) {
|
||||
|
||||
if (tmd == null) {
|
||||
throw new HelenusException(
|
||||
"table does not exists "
|
||||
+ entity.getName()
|
||||
+ "for entity "
|
||||
+ entity.getMappingInterface());
|
||||
}
|
||||
if (tmd == null) {
|
||||
throw new HelenusException(
|
||||
"table does not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
|
||||
}
|
||||
|
||||
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
|
||||
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
|
||||
|
||||
list.addAll(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
list.addAll(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
|
||||
if (!list.isEmpty()) {
|
||||
throw new HelenusException(
|
||||
"schema changed for entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ ", apply this command: "
|
||||
+ list);
|
||||
}
|
||||
}
|
||||
if (!list.isEmpty()) {
|
||||
throw new HelenusException(
|
||||
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
|
||||
}
|
||||
}
|
||||
|
||||
public void updateTable(TableMetadata tmd, HelenusEntity entity) {
|
||||
if (tmd == null) {
|
||||
createTable(entity);
|
||||
return;
|
||||
}
|
||||
public void updateTable(TableMetadata tmd, HelenusEntity entity) {
|
||||
if (tmd == null) {
|
||||
createTable(entity);
|
||||
return;
|
||||
}
|
||||
|
||||
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
|
||||
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
}
|
||||
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
|
||||
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
}
|
||||
|
||||
public void createView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.createMaterializedView(
|
||||
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
|
||||
true);
|
||||
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10 does not yet support 2i on materialized views.
|
||||
}
|
||||
public void createView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.createMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
|
||||
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
|
||||
// does not yet support 2i on materialized views.
|
||||
}
|
||||
|
||||
public void dropView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.dropMaterializedView(
|
||||
sessionOps.usingKeyspace(), entity.getName().toCql(), entity),
|
||||
true);
|
||||
}
|
||||
public void dropView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.dropMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
|
||||
}
|
||||
|
||||
public void updateView(TableMetadata tmd, HelenusEntity entity) {
|
||||
if (tmd == null) {
|
||||
createTable(entity);
|
||||
return;
|
||||
}
|
||||
public void updateView(TableMetadata tmd, HelenusEntity entity) {
|
||||
if (tmd == null) {
|
||||
createTable(entity);
|
||||
return;
|
||||
}
|
||||
|
||||
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
|
||||
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
}
|
||||
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
|
||||
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
|
||||
}
|
||||
|
||||
private void executeBatch(List<SchemaStatement> list) {
|
||||
private void executeBatch(List<SchemaStatement> list) {
|
||||
|
||||
list.forEach(
|
||||
s -> {
|
||||
sessionOps.execute(s, true);
|
||||
});
|
||||
}
|
||||
list.forEach(s -> {
|
||||
sessionOps.execute(s, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
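A rough sketch of how these operations are typically driven during session setup (the caller, the metadata lookup, and the flag values below are illustrative assumptions, not part of this change):

    // Hypothetical wiring around TableOperations; the create/validate flow above is the point.
    TableOperations tableOps = new TableOperations(sessionOps, false /* dropUnusedColumns */, false /* dropUnusedIndexes */);
    TableMetadata tmd = keyspaceMetadata.getTable(entity.getName().toCql());
    if (tmd == null) {
      tableOps.createTable(entity);         // CREATE TABLE plus its secondary indexes
    } else {
      tableOps.validateTable(tmd, entity);  // throws HelenusException if the schema has drifted
    }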
@@ -15,42 +15,54 @@
 */
package net.helenus.core;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import com.google.common.base.Stopwatch;

import net.helenus.core.cache.Facet;

public interface UnitOfWork<X extends Exception> extends AutoCloseable {

  /**
   * Marks the beginning of a transactional section of work. Will write a record
   * to the shared write-ahead log.
   *
   * @return the handle used to commit or abort the work.
   */
  UnitOfWork<X> begin();

  void addNestedUnitOfWork(UnitOfWork<X> uow);

  /**
   * Checks to see if the work performed between calling begin and now can be
   * committed or not.
   *
   * @return a function from which to chain work that only happens when commit is
   *         successful
   * @throws X
   *             when the work overlaps with other concurrent writers.
   */
  PostCommitFunction<Void, Void> commit() throws X;

  /**
   * Explicitly abort the work within this unit of work. Any nested aborted unit
   * of work will trigger the entire unit of work to commit.
   */
  void abort();

  boolean hasAborted();

  boolean hasCommitted();

  Optional<Object> cacheLookup(List<Facet> facets);

  void cacheUpdate(Object pojo, List<Facet> facets);

  UnitOfWork setPurpose(String purpose);

  Stopwatch getExecutionTimer();

  Stopwatch getCacheLookupTimer();

  Map<String, Set<Object>> getCache();
}
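A minimal usage sketch for the interface above (HelenusSession.begin() and the exact PostCommitFunction API are assumed for illustration; neither is shown in this diff):

    UnitOfWork<HelenusException> uow = session.begin();
    try {
      // reads issued with sync(uow) consult the unit-of-work cache before hitting Cassandra
      PostCommitFunction<Void, Void> postCommit = uow.commit(); // throws when concurrent writers overlap
    } catch (HelenusException e) {
      uow.abort();
    }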
@@ -19,8 +19,8 @@ import net.helenus.support.HelenusException;

class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> {

  @SuppressWarnings("unchecked")
  public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) {
    super(session, (AbstractUnitOfWork<HelenusException>) parent);
  }
}
@@ -15,65 +15,63 @@
 */
package net.helenus.core;

import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;

import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;

public final class UserTypeOperations {

  private final AbstractSessionOperations sessionOps;
  private final boolean dropUnusedColumns;

  public UserTypeOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns) {
    this.sessionOps = sessionOps;
    this.dropUnusedColumns = dropUnusedColumns;
  }

  public void createUserType(HelenusEntity entity) {
    sessionOps.execute(SchemaUtil.createUserType(entity), true);
  }

  public void dropUserType(HelenusEntity entity) {
    sessionOps.execute(SchemaUtil.dropUserType(entity), true);
  }

  public void validateUserType(UserType userType, HelenusEntity entity) {
    if (userType == null) {
      throw new HelenusException(
          "userType does not exist " + entity.getName() + " for entity " + entity.getMappingInterface());
    }

    List<SchemaStatement> list = SchemaUtil.alterUserType(userType, entity, dropUnusedColumns);

    if (!list.isEmpty()) {
      throw new HelenusException(
          "schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
    }
  }

  public void updateUserType(UserType userType, HelenusEntity entity) {
    if (userType == null) {
      createUserType(entity);
      return;
    }

    executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns));
  }

  private void executeBatch(List<SchemaStatement> list) {
    list.forEach(s -> {
      sessionOps.execute(s, true);
    });
  }
}
@@ -1,3 +1,18 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.annotation;

import java.lang.annotation.ElementType;

@@ -7,4 +22,5 @@ import java.lang.annotation.Target;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Cacheable {
}
@@ -4,13 +4,14 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import net.helenus.core.ConflictingUnitOfWorkException;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Retry {

  Class<? extends Exception>[] on() default ConflictingUnitOfWorkException.class;

  int times() default 3;
}
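For context, a method that runs a unit of work can opt in to automatic retries like this (the method and its types are placeholders, not part of this change):

    @Retry(on = ConflictingUnitOfWorkException.class, times = 5)
    public void transferFunds(UUID fromAccount, UUID toAccount, long amount) {
      // re-executed up to 5 times if a ConflictingUnitOfWorkException surfaces
    }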
83  src/main/java/net/helenus/core/aspect/RetryAspect.java  Normal file
@@ -0,0 +1,83 @@
package net.helenus.core.aspect;

import java.lang.reflect.Method;
import java.util.Arrays;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;

import net.helenus.core.annotation.Retry;

@Aspect
public class RetryAspect {

  private static final Logger log = LoggerFactory.getLogger(RetryAspect.class);

  // NOTE: the annotation lives in net.helenus.core.annotation (singular), so the
  // pointcut must reference that package or the advice never fires.
  @Around("@annotation(net.helenus.core.annotation.Retry)")
  public Object retry(ProceedingJoinPoint pjp) throws Throwable {
    Retry retryAnnotation = getRetryAnnotation(pjp);
    return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
  }

  private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
    return pjp.proceed();
  }

  private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
    int times = retryAnnotation.times();
    Class<? extends Throwable>[] retryOn = retryAnnotation.on();
    Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
    Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
    log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
    return tryProceeding(pjp, times, retryOn);
  }

  private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
      throws Throwable {
    try {
      return proceed(pjp);
    } catch (Throwable throwable) {
      if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
        log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
        return tryProceeding(pjp, times, retryOn);
      }
      throw throwable;
    }
  }

  private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
    Throwable[] causes = ExceptionUtils.getThrowables(throwable);
    for (Throwable cause : causes) {
      for (Class<? extends Throwable> retryThrowable : retryOn) {
        if (retryThrowable.isAssignableFrom(cause.getClass())) {
          return true;
        }
      }
    }
    return false;
  }

  private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
    MethodSignature signature = (MethodSignature) pjp.getSignature();
    Method method = signature.getMethod();
    Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);

    if (retryAnnotation != null) {
      return retryAnnotation;
    }

    Class[] argClasses = new Class[pjp.getArgs().length];
    for (int i = 0; i < pjp.getArgs().length; i++) {
      argClasses[i] = pjp.getArgs()[i].getClass();
    }
    method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
    return AnnotationUtils.findAnnotation(method, Retry.class);
  }
}
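The advice only runs if the aspect is actually woven. With Spring that usually means registering it as a bean and enabling AspectJ auto-proxying; a sketch of the assumed configuration (not part of this change, and load-time weaving via aspectjweaver is an alternative):

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.EnableAspectJAutoProxy;

    @Configuration
    @EnableAspectJAutoProxy
    public class RetryConfig {
      @Bean
      public RetryAspect retryAspect() {
        return new RetryAspect();
      }
    }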
@@ -2,7 +2,7 @@ package net.helenus.core.aspect;

import java.lang.reflect.Method;
import java.util.Arrays;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;

@@ -13,69 +13,71 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;

import net.helenus.core.annotation.Retry;

@Aspect
public class RetryConcurrentUnitOfWorkAspect {

  private static final Logger log = LoggerFactory.getLogger(RetryConcurrentUnitOfWorkAspect.class);

  @Around("@annotation(net.helenus.core.annotation.Retry)")
  public Object retry(ProceedingJoinPoint pjp) throws Throwable {
    Retry retryAnnotation = getRetryAnnotation(pjp);
    return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
  }

  private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
    return pjp.proceed();
  }

  private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
    int times = retryAnnotation.times();
    Class<? extends Throwable>[] retryOn = retryAnnotation.on();
    Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
    Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
    log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
    return tryProceeding(pjp, times, retryOn);
  }

  private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
      throws Throwable {
    try {
      return proceed(pjp);
    } catch (Throwable throwable) {
      if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
        log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
        return tryProceeding(pjp, times, retryOn);
      }
      throw throwable;
    }
  }

  private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
    Throwable[] causes = ExceptionUtils.getThrowables(throwable);
    for (Throwable cause : causes) {
      for (Class<? extends Throwable> retryThrowable : retryOn) {
        if (retryThrowable.isAssignableFrom(cause.getClass())) {
          return true;
        }
      }
    }
    return false;
  }

  private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
    MethodSignature signature = (MethodSignature) pjp.getSignature();
    Method method = signature.getMethod();
    Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);

    if (retryAnnotation != null) {
      return retryAnnotation;
    }

    Class[] argClasses = new Class[pjp.getArgs().length];
    for (int i = 0; i < pjp.getArgs().length; i++) {
      argClasses[i] = pjp.getArgs()[i].getClass();
    }
    method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
    return AnnotationUtils.findAnnotation(method, Retry.class);
  }
}
38  src/main/java/net/helenus/core/cache/BoundFacet.java  vendored  Normal file
@@ -0,0 +1,38 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.cache;

import java.util.Map;
import java.util.stream.Collectors;

import net.helenus.mapping.HelenusProperty;

public class BoundFacet extends Facet<String> {
  private final Map<HelenusProperty, Object> properties;

  BoundFacet(String name, Map<HelenusProperty, Object> properties) {
    super(name,
        (properties.keySet().size() > 1)
            ? "[" + String.join(", ",
                properties.keySet().stream().map(key -> properties.get(key).toString())
                    .collect(Collectors.toSet()))
                + "]"
            : String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
                .collect(Collectors.toSet())));
    this.properties = properties;
  }

}
49  src/main/java/net/helenus/core/cache/CacheUtil.java  vendored  Normal file
@@ -0,0 +1,49 @@
package net.helenus.core.cache;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class CacheUtil {

  public static List<String[]> combinations(List<String> items) {
    int n = items.size();
    if (n > 20 || n < 0)
      throw new IllegalArgumentException(n + " is out of range");
    long e = Math.round(Math.pow(2, n));
    List<String[]> out = new ArrayList<String[]>((int) e - 1);
    for (int k = 1; k <= items.size(); k++) {
      kCombinations(items, 0, k, new String[k], out);
    }
    return out;
  }

  private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
    if (k == 0) {
      out.add(arr.clone());
    } else {
      for (int i = n; i <= items.size() - k; i++) {
        arr[arr.length - k] = items.get(i);
        kCombinations(items, i + 1, k - 1, arr, out);
      }
    }
  }

  public static List<String[]> flattenFacets(List<Facet> facets) {
    List<String[]> combinations = CacheUtil.combinations(
        facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> {
          return facet.name() + "==" + facet.value();
        }).collect(Collectors.toList()));
    return combinations;
  }

  public static Object merge(Object to, Object from) {
    return to; // TODO(gburd): yeah...
  }

  public static String schemaName(List<Facet> facets) {
    return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
        .collect(Collectors.joining("."));
  }

}
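For intuition, combinations() enumerates every non-empty subset of the bound facet strings (2^n - 1 of them for n facets), so a cached row can later be found via any partial key. A small example of the expected output:

    List<String[]> keys = CacheUtil.combinations(Arrays.asList("id==1", "name==bob"));
    // keys contains: ["id==1"], ["name==bob"], ["id==1", "name==bob"]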
53  src/main/java/net/helenus/core/cache/Facet.java  vendored  Normal file
@@ -0,0 +1,53 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.cache;

/**
 * An Entity is identifiable via one or more Facets
 */
public class Facet<T> {
  private final String name;
  private T value;
  private boolean fixed = false;

  public Facet(String name) {
    this.name = name;
  }

  public Facet(String name, T value) {
    this.name = name;
    this.value = value;
  }

  public String name() {
    return name;
  }

  public T value() {
    return value;
  }

  public Facet setFixed() {
    fixed = true;
    return this;
  }

  public boolean fixed() {
    return fixed;
  }

}
74  src/main/java/net/helenus/core/cache/UnboundFacet.java  vendored  Normal file
@@ -0,0 +1,74 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.cache;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.helenus.core.SchemaUtil;
import net.helenus.mapping.HelenusProperty;

public class UnboundFacet extends Facet<String> {

  private final List<HelenusProperty> properties;

  public UnboundFacet(List<HelenusProperty> properties) {
    super(SchemaUtil.createPrimaryKeyPhrase(properties));
    this.properties = properties;
  }

  public UnboundFacet(HelenusProperty property) {
    super(property.getPropertyName());
    properties = new ArrayList<HelenusProperty>();
    properties.add(property);
  }

  public List<HelenusProperty> getProperties() {
    return properties;
  }

  public Binder binder() {
    return new Binder(name(), properties);
  }

  public static class Binder {

    private final String name;
    private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
    private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();

    Binder(String name, List<HelenusProperty> properties) {
      this.name = name;
      this.properties.addAll(properties);
    }

    public Binder setValueForProperty(HelenusProperty prop, Object value) {
      properties.remove(prop);
      boundProperties.put(prop, value);
      return this;
    }

    public boolean isBound() {
      return properties.isEmpty();
    }

    public BoundFacet bind() {
      return new BoundFacet(name, boundProperties);
    }
  }
}
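A small sketch of how the Binder is meant to be used (idProperty and someId are placeholders): the UnboundFacet names the key properties, and the Binder collects concrete values until every property is bound, at which point it can produce a cacheable BoundFacet.

    UnboundFacet facet = new UnboundFacet(idProperty);     // idProperty: a HelenusProperty
    UnboundFacet.Binder binder = facet.binder();
    binder.setValueForProperty(idProperty, someId);
    if (binder.isBound()) {
      BoundFacet bound = binder.bind();                    // e.g. facet "id" bound to "1234"
    }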
@@ -17,92 +17,94 @@ package net.helenus.core.operation;

import java.util.LinkedList;
import java.util.List;

import net.helenus.core.*;

public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
    extends AbstractOperation<E, O> {

  protected List<Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedList<Filter<?>>();
    }
    filters.add(filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}
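An illustrative call chain against this fluent API (the entity getters, Operator constants, and terminal sync() call are assumptions based on the rest of the library, not something introduced by this diff):

    selectOperation
        .where(timeline::userId, Operator.EQ, userId)
        .and(timeline::when, Operator.GTE, since)
        .onlyIf(timeline::active, Operator.EQ, true)  // rendered as an IF clause on the statement
        .sync();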
@@ -19,94 +19,95 @@ import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;

public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
    extends AbstractOptionalOperation<E, O> {

  protected Map<HelenusProperty, Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterOptionalOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
    }
    filters.put(filter.getNode().getProperty(), filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}
@@ -19,94 +19,95 @@ import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import net.helenus.core.*;
import net.helenus.mapping.HelenusProperty;

public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
    extends AbstractStreamOperation<E, O> {

  protected Map<HelenusProperty, Filter<?>> filters = null;
  protected List<Filter<?>> ifFilters = null;

  public AbstractFilterStreamOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public <V> O where(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O where(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O where(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Postulate<V> postulate) {
    addFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O and(Getter<V> getter, Operator operator, V val) {
    addFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O and(Filter<V> filter) {
    addFilter(filter);
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
    addIfFilter(Filter.create(getter, postulate));
    return (O) this;
  }

  public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
    addIfFilter(Filter.create(getter, operator, val));
    return (O) this;
  }

  public <V> O onlyIf(Filter<V> filter) {
    addIfFilter(filter);
    return (O) this;
  }

  private void addFilter(Filter<?> filter) {
    if (filters == null) {
      filters = new LinkedHashMap<HelenusProperty, Filter<?>>();
    }
    filters.put(filter.getNode().getProperty(), filter);
  }

  private void addIfFilter(Filter<?> filter) {
    if (ifFilters == null) {
      ifFilters = new LinkedList<Filter<?>>();
    }
    ifFilters.add(filter);
  }
}
@@ -15,58 +15,75 @@
 */
package net.helenus.core.operation;

import java.util.concurrent.CompletableFuture;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;

public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {

  public AbstractOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract E transform(ResultSet resultSet);

  public boolean cacheable() {
    return false;
  }

  public PreparedOperation<E> prepare() {
    return new PreparedOperation<E>(prepareStatement(), this);
  }

  public E sync() { // throws TimeoutException {
    final Timer.Context context = requestLatency.time();
    try {
      ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
          showValues, false);
      return transform(resultSet);
    } finally {
      context.stop();
    }
  }

  public E sync(UnitOfWork uow) { // throws TimeoutException {
    if (uow == null)
      return sync();

    final Timer.Context context = requestLatency.time();
    try {
      ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
          showValues, true);
      E result = transform(resultSet);
      return result;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<E> async() {
    return CompletableFuture.<E>supplyAsync(() -> {
      // try {
      return sync();
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }

  public CompletableFuture<E> async(UnitOfWork uow) {
    if (uow == null)
      return async();
    return CompletableFuture.<E>supplyAsync(() -> {
      // try {
      return sync(uow); // propagate the unit of work so its cache is consulted
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }
}
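Both entry points funnel through the same execute(...) call with the configured timeout; a concrete operation is typically consumed like this (operation stands for any subclass of AbstractOperation):

    E value = operation.sync();                           // blocks up to queryExecutionTimeout (10 SECONDS by default)
    CompletableFuture<E> future = operation.async(uow);   // same work, deferred to a CompletableFuture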
@@ -15,101 +15,158 @@
 */
package net.helenus.core.operation;

import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;

public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
    extends AbstractStatementOperation<E, O> {

  public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
  }

  public abstract Optional<E> transform(ResultSet resultSet);

  public PreparedOptionalOperation<E> prepare() {
    return new PreparedOptionalOperation<E>(prepareStatement(), this);
  }

  public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
    final O _this = (O) this;
    return Futures.transform(prepareStatementAsync(),
        new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
          @Override
          public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
            return new PreparedOptionalOperation<E>(preparedStatement, _this);
          }
        });
  }

  public Optional<E> sync() { // throws TimeoutException {
    final Timer.Context context = requestLatency.time();
    try {
      Optional<E> result = Optional.empty();
      E cacheResult = null;
      boolean updateCache = isSessionCacheable();

      if (enableCache && isSessionCacheable()) {
        List<Facet> facets = bindFacetValues();
        String tableName = CacheUtil.schemaName(facets);
        cacheResult = (E) sessionOps.checkCache(tableName, facets);
        if (cacheResult != null) {
          result = Optional.of(cacheResult);
          updateCache = false;
        }
      }

      if (!result.isPresent()) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
            queryTimeoutUnits, showValues, false);

        // Transform the query result set into the desired shape.
        result = transform(resultSet);
      }

      if (updateCache && result.isPresent()) {
        List<Facet> facets = getFacets();
        if (facets != null && facets.size() > 1) {
          sessionOps.updateCache(result.get(), facets);
        }
      }
      return result;
    } finally {
      context.stop();
    }
  }

  public Optional<E> sync(UnitOfWork<?> uow) { // throws TimeoutException {
    if (uow == null)
      return sync();

    final Timer.Context context = requestLatency.time();
    try {

      Optional<E> result = Optional.empty();
      E cacheResult = null;
      boolean updateCache = true;

      if (enableCache) {
        Stopwatch timer = uow.getCacheLookupTimer();
        timer.start();
        List<Facet> facets = bindFacetValues();
        cacheResult = checkCache(uow, facets);
        if (cacheResult != null) {
          result = Optional.of(cacheResult);
          updateCache = false;
        } else {
          if (isSessionCacheable()) {
            String tableName = CacheUtil.schemaName(facets);
            cacheResult = (E) sessionOps.checkCache(tableName, facets);
            if (cacheResult != null) {
              result = Optional.of(cacheResult);
            }
          }
        }
        timer.stop();
      }

      if (!result.isPresent()) {
        // Formulate the query and execute it against the Cassandra cluster.
        ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
            showValues, true);

        // Transform the query result set into the desired shape.
        result = transform(resultSet);
      }

      // If we have a result, it wasn't from the UOW cache, and we're caching things
      // then we need to put this result into the cache for future requests to find.
      if (updateCache && result.isPresent()) {
        updateCache(uow, result.get(), getFacets());
      }

      return result;
    } finally {
      context.stop();
    }
  }

  public CompletableFuture<Optional<E>> async() {
    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
      // try {
      return sync();
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }

  public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
    if (uow == null)
      return async();
    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
      // try {
      return sync(uow); // propagate the unit of work so its cache is consulted
      // } catch (TimeoutException ex) {
      //   throw new CompletionException(ex);
      // }
    });
  }
}
@@ -15,8 +15,15 @@
 */
package net.helenus.core.operation;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;

@@ -27,268 +34,335 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;

import brave.Tracer;
import brave.propagation.TraceContext;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException;

public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractStatementOperation.class);

  protected boolean enableCache = true;
  protected boolean showValues = true;
  protected TraceContext traceContext;
  long queryExecutionTimeout = 10;
  TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
  private ConsistencyLevel consistencyLevel;
  private ConsistencyLevel serialConsistencyLevel;
  private RetryPolicy retryPolicy;
  private boolean idempotent = false;
  private boolean enableTracing = false;
  private long[] defaultTimestamp = null;
  private int[] fetchSize = null;

  public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
    this.consistencyLevel = sessionOperations.getDefaultConsistencyLevel();
    this.idempotent = sessionOperations.getDefaultQueryIdempotency();
  }

  public abstract Statement buildStatement(boolean cached);

  public O ignoreCache(boolean enabled) {
    enableCache = enabled;
    return (O) this;
  }

  public O ignoreCache() {
    enableCache = true;
    return (O) this;
  }

  public O showValues(boolean enabled) {
    this.showValues = enabled;
    return (O) this;
  }

  public O defaultTimestamp(long timestamp) {
    this.defaultTimestamp = new long[1];
    this.defaultTimestamp[0] = timestamp;
    return (O) this;
  }

  public O retryPolicy(RetryPolicy retryPolicy) {
    this.retryPolicy = retryPolicy;
    return (O) this;
  }

  public O defaultRetryPolicy() {
    this.retryPolicy = DefaultRetryPolicy.INSTANCE;
    return (O) this;
|
||||
public O defaultRetryPolicy() {
|
||||
this.retryPolicy = DefaultRetryPolicy.INSTANCE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O idempotent() {
|
||||
this.idempotent = true;
|
||||
return (O) this;
|
||||
}
|
||||
public O idempotent() {
|
||||
this.idempotent = true;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O isIdempotent(boolean idempotent) {
|
||||
this.idempotent = idempotent;
|
||||
return (O) this;
|
||||
}
|
||||
public O isIdempotent(boolean idempotent) {
|
||||
this.idempotent = idempotent;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O downgradingConsistencyRetryPolicy() {
|
||||
this.retryPolicy = DowngradingConsistencyRetryPolicy.INSTANCE;
|
||||
return (O) this;
|
||||
}
|
||||
public O downgradingConsistencyRetryPolicy() {
|
||||
this.retryPolicy = DowngradingConsistencyRetryPolicy.INSTANCE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O fallthroughRetryPolicy() {
|
||||
this.retryPolicy = FallthroughRetryPolicy.INSTANCE;
|
||||
return (O) this;
|
||||
}
|
||||
public O fallthroughRetryPolicy() {
|
||||
this.retryPolicy = FallthroughRetryPolicy.INSTANCE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistency(ConsistencyLevel level) {
|
||||
this.consistencyLevel = level;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistency(ConsistencyLevel level) {
|
||||
this.consistencyLevel = level;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyAny() {
|
||||
this.consistencyLevel = ConsistencyLevel.ANY;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyAny() {
|
||||
this.consistencyLevel = ConsistencyLevel.ANY;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyOne() {
|
||||
this.consistencyLevel = ConsistencyLevel.ONE;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyOne() {
|
||||
this.consistencyLevel = ConsistencyLevel.ONE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyAll() {
|
||||
this.consistencyLevel = ConsistencyLevel.ALL;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyAll() {
|
||||
this.consistencyLevel = ConsistencyLevel.ALL;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyLocalOne() {
|
||||
this.consistencyLevel = ConsistencyLevel.LOCAL_ONE;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyLocalOne() {
|
||||
this.consistencyLevel = ConsistencyLevel.LOCAL_ONE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyLocalQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyLocalQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O consistencyEachQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.EACH_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
public O consistencyEachQuorum() {
|
||||
this.consistencyLevel = ConsistencyLevel.EACH_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistency(ConsistencyLevel level) {
|
||||
this.serialConsistencyLevel = level;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistency(ConsistencyLevel level) {
|
||||
this.serialConsistencyLevel = level;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyAny() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ANY;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyAny() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ANY;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyOne() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ONE;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyOne() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ONE;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyQuorum() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyQuorum() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyAll() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ALL;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyAll() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.ALL;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyLocal() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.LOCAL_SERIAL;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyLocal() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.LOCAL_SERIAL;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O serialConsistencyLocalQuorum() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
public O serialConsistencyLocalQuorum() {
|
||||
this.serialConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O disableTracing() {
|
||||
this.enableTracing = false;
|
||||
return (O) this;
|
||||
}
|
||||
public O disableTracing() {
|
||||
this.enableTracing = false;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O enableTracing() {
|
||||
this.enableTracing = true;
|
||||
return (O) this;
|
||||
}
|
||||
public O enableTracing() {
|
||||
this.enableTracing = true;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O tracing(boolean enable) {
|
||||
this.enableTracing = enable;
|
||||
return (O) this;
|
||||
}
|
||||
public O tracing(boolean enable) {
|
||||
this.enableTracing = enable;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O fetchSize(int fetchSize) {
|
||||
this.fetchSize = new int[1];
|
||||
this.fetchSize[0] = fetchSize;
|
||||
return (O) this;
|
||||
}
|
||||
public O fetchSize(int fetchSize) {
|
||||
this.fetchSize = new int[1];
|
||||
this.fetchSize[0] = fetchSize;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public Statement options(Statement statement) {
|
||||
public O queryTimeoutMs(long ms) {
|
||||
this.queryExecutionTimeout = ms;
|
||||
this.queryTimeoutUnits = TimeUnit.MILLISECONDS;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
if (defaultTimestamp != null) {
|
||||
statement.setDefaultTimestamp(defaultTimestamp[0]);
|
||||
}
|
||||
public O queryTimeout(long timeout, TimeUnit units) {
|
||||
this.queryExecutionTimeout = timeout;
|
||||
this.queryTimeoutUnits = units;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
if (consistencyLevel != null) {
|
||||
statement.setConsistencyLevel(consistencyLevel);
|
||||
}
|
||||
public Statement options(Statement statement) {
|
||||
|
||||
if (serialConsistencyLevel != null) {
|
||||
statement.setSerialConsistencyLevel(serialConsistencyLevel);
|
||||
}
|
||||
if (defaultTimestamp != null) {
|
||||
statement.setDefaultTimestamp(defaultTimestamp[0]);
|
||||
}
|
||||
|
||||
if (retryPolicy != null) {
|
||||
statement.setRetryPolicy(retryPolicy);
|
||||
}
|
||||
if (consistencyLevel != null) {
|
||||
statement.setConsistencyLevel(consistencyLevel);
|
||||
}
|
||||
|
||||
if (enableTracing) {
|
||||
statement.enableTracing();
|
||||
} else {
|
||||
statement.disableTracing();
|
||||
}
|
||||
if (serialConsistencyLevel != null) {
|
||||
statement.setSerialConsistencyLevel(serialConsistencyLevel);
|
||||
}
|
||||
|
||||
if (fetchSize != null) {
|
||||
statement.setFetchSize(fetchSize[0]);
|
||||
}
|
||||
if (retryPolicy != null) {
|
||||
statement.setRetryPolicy(retryPolicy);
|
||||
}
|
||||
|
||||
if (idempotent) {
|
||||
statement.setIdempotent(true);
|
||||
}
|
||||
if (enableTracing) {
|
||||
statement.enableTracing();
|
||||
} else {
|
||||
statement.disableTracing();
|
||||
}
|
||||
|
||||
return statement;
|
||||
}
|
||||
if (fetchSize != null) {
|
||||
statement.setFetchSize(fetchSize[0]);
|
||||
}
|
||||
|
||||
public O zipkinContext(TraceContext traceContext) {
|
||||
if (traceContext != null) {
|
||||
Tracer tracer = this.sessionOps.getZipkinTracer();
|
||||
if (tracer != null) {
|
||||
this.traceContext = traceContext;
|
||||
}
|
||||
}
|
||||
if (idempotent) {
|
||||
statement.setIdempotent(true);
|
||||
}
|
||||
|
||||
return (O) this;
|
||||
}
|
||||
return statement;
|
||||
}
|
||||
|
||||
public Statement statement() {
|
||||
return buildStatement(false);
|
||||
}
|
||||
public O zipkinContext(TraceContext traceContext) {
|
||||
if (traceContext != null) {
|
||||
Tracer tracer = this.sessionOps.getZipkinTracer();
|
||||
if (tracer != null) {
|
||||
this.traceContext = traceContext;
|
||||
}
|
||||
}
|
||||
|
||||
public String cql() {
|
||||
Statement statement = buildStatement(false);
|
||||
if (statement == null) return "";
|
||||
if (statement instanceof BuiltStatement) {
|
||||
BuiltStatement buildStatement = (BuiltStatement) statement;
|
||||
return buildStatement.setForceNoValues(true).getQueryString();
|
||||
} else {
|
||||
return statement.toString();
|
||||
}
|
||||
}
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public PreparedStatement prepareStatement() {
|
||||
public Statement statement() {
|
||||
return buildStatement(false);
|
||||
}
|
||||
|
||||
Statement statement = buildStatement(true);
|
||||
public String cql() {
|
||||
Statement statement = buildStatement(false);
|
||||
if (statement == null)
|
||||
return "";
|
||||
if (statement instanceof BuiltStatement) {
|
||||
BuiltStatement buildStatement = (BuiltStatement) statement;
|
||||
return buildStatement.setForceNoValues(true).getQueryString();
|
||||
} else {
|
||||
return statement.toString();
|
||||
}
|
||||
}
|
||||
|
||||
if (statement instanceof RegularStatement) {
|
||||
public PreparedStatement prepareStatement() {
|
||||
|
||||
RegularStatement regularStatement = (RegularStatement) statement;
|
||||
Statement statement = buildStatement(true);
|
||||
|
||||
return sessionOps.prepare(regularStatement);
|
||||
}
|
||||
if (statement instanceof RegularStatement) {
|
||||
|
||||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
RegularStatement regularStatement = (RegularStatement) statement;
|
||||
|
||||
public ListenableFuture<PreparedStatement> prepareStatementAsync() {
|
||||
return sessionOps.prepare(regularStatement);
|
||||
}
|
||||
|
||||
Statement statement = buildStatement(true);
|
||||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
|
||||
if (statement instanceof RegularStatement) {
|
||||
public ListenableFuture<PreparedStatement> prepareStatementAsync() {
|
||||
|
||||
RegularStatement regularStatement = (RegularStatement) statement;
|
||||
Statement statement = buildStatement(true);
|
||||
|
||||
return sessionOps.prepareAsync(regularStatement);
|
||||
}
|
||||
if (statement instanceof RegularStatement) {
|
||||
|
||||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
RegularStatement regularStatement = (RegularStatement) statement;
|
||||
|
||||
return sessionOps.prepareAsync(regularStatement);
|
||||
}
|
||||
|
||||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
|
||||
protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
|
||||
E result = null;
|
||||
Optional<Object> optionalCachedResult = Optional.empty();
|
||||
|
||||
if (!facets.isEmpty()) {
|
||||
optionalCachedResult = uow.cacheLookup(facets);
|
||||
if (optionalCachedResult.isPresent()) {
|
||||
uowCacheHits.mark();
|
||||
LOG.info("UnitOfWork({}) cache hit using facets", uow.hashCode());
|
||||
result = (E) optionalCachedResult.get();
|
||||
}
|
||||
}
|
||||
|
||||
if (result == null) {
|
||||
uowCacheMiss.mark();
|
||||
LOG.info("UnitOfWork({}) cache miss", uow.hashCode());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected void updateCache(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
|
||||
List<Facet> facets = new ArrayList<>();
|
||||
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
|
||||
for (Facet facet : identifyingFacets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
if (valueMap == null) {
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
} else {
|
||||
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
|
||||
}
|
||||
facets.add(binder.bind());
|
||||
});
|
||||
} else {
|
||||
facets.add(facet);
|
||||
}
|
||||
}
|
||||
|
||||
// Cache the value (pojo), the statement key, and the fully bound facets.
|
||||
uow.cacheUpdate(pojo, facets);
|
||||
}
|
||||
}
|
||||
|
|
|
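For context, a hypothetical caller-side sketch of what the fluent setters above ultimately do: they only record values, and options(Statement) copies them onto the driver Statement just before execution. This uses the DataStax 3.x driver API already imported in this file; the query text is made up.

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.SimpleStatement;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.policies.DefaultRetryPolicy;

class StatementOptionsSketch {
    public static void main(String[] args) {
        Statement stmt = new SimpleStatement("SELECT * FROM example_table WHERE id = 1");
        // Equivalent to what options(Statement) applies after consistencyLocalQuorum(),
        // defaultRetryPolicy(), fetchSize(500) and idempotent() have been called.
        stmt.setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
        stmt.setRetryPolicy(DefaultRetryPolicy.INSTANCE);
        stmt.setFetchSize(500);
        stmt.setIdempotent(true);
        stmt.disableTracing();
    }
}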
@@ -15,93 +15,158 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.PreparedStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.google.common.base.Function;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.util.concurrent.Futures;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
|
||||
extends AbstractStatementOperation<E, O> {
|
||||
extends
|
||||
AbstractStatementOperation<E, O> {
|
||||
|
||||
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
|
||||
public abstract Stream<E> transform(ResultSet resultSet);
|
||||
public abstract Stream<E> transform(ResultSet resultSet);
|
||||
|
||||
public PreparedStreamOperation<E> prepare() {
|
||||
return new PreparedStreamOperation<E>(prepareStatement(), this);
|
||||
}
|
||||
public PreparedStreamOperation<E> prepare() {
|
||||
return new PreparedStreamOperation<E>(prepareStatement(), this);
|
||||
}
|
||||
|
||||
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
|
||||
final O _this = (O) this;
|
||||
return Futures.transform(
|
||||
prepareStatementAsync(),
|
||||
new Function<PreparedStatement, PreparedStreamOperation<E>>() {
|
||||
@Override
|
||||
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
|
||||
return new PreparedStreamOperation<E>(preparedStatement, _this);
|
||||
}
|
||||
});
|
||||
}
|
||||
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
|
||||
final O _this = (O) this;
|
||||
return Futures.transform(prepareStatementAsync(),
|
||||
new Function<PreparedStatement, PreparedStreamOperation<E>>() {
|
||||
@Override
|
||||
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
|
||||
return new PreparedStreamOperation<E>(preparedStatement, _this);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public Stream<E> sync() {
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
ResultSet resultSet = this.execute(sessionOps, null, traceContext, showValues, false);
|
||||
return transform(resultSet);
|
||||
} finally {
|
||||
context.stop();
|
||||
}
|
||||
}
|
||||
public Stream<E> sync() {// throws TimeoutException {
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
Stream<E> resultStream = null;
|
||||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable();
|
||||
|
||||
public Stream<E> sync(UnitOfWork uow) {
|
||||
if (uow == null) return sync();
|
||||
if (enableCache && isSessionCacheable()) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cacheResult != null) {
|
||||
resultStream = Stream.of(cacheResult);
|
||||
updateCache = false;
|
||||
}
|
||||
}
|
||||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
Stream<E> result = null;
|
||||
String key = getStatementCacheKey();
|
||||
if (enableCache && key != null) {
|
||||
Set<E> cachedResult = (Set<E>) uow.cacheLookup(key);
|
||||
if (cachedResult != null) {
|
||||
//TODO(gburd): what about select ResultSet, Tuple... etc.?
|
||||
uowCacheHits.mark();
|
||||
logger.info("UOW({}) cache hit, {}", uow.hashCode());
|
||||
result = cachedResult.stream();
|
||||
} else {
|
||||
uowCacheMiss.mark();
|
||||
}
|
||||
}
|
||||
if (resultStream == null) {
|
||||
// Formulate the query and execute it against the Cassandra cluster.
|
||||
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
|
||||
queryTimeoutUnits, showValues, false);
|
||||
|
||||
if (result == null) {
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, showValues, true);
|
||||
result = transform(resultSet);
|
||||
// Transform the query result set into the desired shape.
|
||||
resultStream = transform(resultSet);
|
||||
}
|
||||
|
||||
if (key != null) {
|
||||
uow.getCache().put(key, (Set<Object>) result);
|
||||
}
|
||||
}
|
||||
if (updateCache && resultStream != null) {
|
||||
List<Facet> facets = getFacets();
|
||||
if (facets != null && facets.size() > 1) {
|
||||
List<E> again = new ArrayList<>();
|
||||
resultStream.forEach(result -> {
|
||||
sessionOps.updateCache(result, facets);
|
||||
again.add(result);
|
||||
});
|
||||
resultStream = again.stream();
|
||||
}
|
||||
}
|
||||
return resultStream;
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
context.stop();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
context.stop();
|
||||
}
|
||||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async() {
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> sync());
|
||||
}
|
||||
public Stream<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
|
||||
if (uow == null) return async();
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> sync(uow));
|
||||
}
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
Stream<E> resultStream = null;
|
||||
E cachedResult = null;
|
||||
boolean updateCache = true;
|
||||
|
||||
if (enableCache) {
|
||||
Stopwatch timer = uow.getCacheLookupTimer();
|
||||
timer.start();
|
||||
List<Facet> facets = bindFacetValues();
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
resultStream = Stream.of(cachedResult);
|
||||
updateCache = false;
|
||||
}
|
||||
timer.stop();
|
||||
}
|
||||
|
||||
if (resultStream == null) {
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
resultStream = transform(resultSet);
|
||||
}
|
||||
|
||||
// If we have a result and we're caching then we need to put it into the cache
|
||||
// for future requests to find.
|
||||
if (updateCache && resultStream != null) {
|
||||
List<E> again = new ArrayList<>();
|
||||
List<Facet> facets = getFacets();
|
||||
resultStream.forEach(result -> {
|
||||
updateCache(uow, result, facets);
|
||||
again.add(result);
|
||||
});
|
||||
resultStream = again.stream();
|
||||
}
|
||||
|
||||
return resultStream;
|
||||
} finally {
|
||||
context.stop();
|
||||
}
|
||||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async() {
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
});
|
||||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async(UnitOfWork<?> uow) {
|
||||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
// try {
|
||||
return sync();
|
||||
// } catch (TimeoutException ex) {
|
||||
// throw new CompletionException(ex);
|
||||
// }
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
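The sync(UnitOfWork) body above follows a check-cache, query-on-miss, write-back pattern. A stripped-down sketch of that flow with plain collections (rowCache and queryDatabase are hypothetical stand-ins, not Helenus types):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

class StreamCacheSketch {
    static final Map<String, List<String>> rowCache = new HashMap<>();

    static Stream<String> fetch(String cacheKey) {
        List<String> cached = rowCache.get(cacheKey);
        if (cached != null) {
            return cached.stream();                  // cache hit: no query issued
        }
        List<String> rows = queryDatabase(cacheKey); // cache miss: run the query
        rowCache.put(cacheKey, rows);                // write back for later reads in the same UOW
        return rows.stream();
    }

    static List<String> queryDatabase(String key) {
        List<String> rows = new ArrayList<>();
        rows.add("row-for-" + key);
        return rows;
    }

    public static void main(String[] args) {
        System.out.println(fetch("user:42").count()); // 1 (miss, then cached)
        System.out.println(fetch("user:42").count()); // 1 (hit)
    }
}

One detail the real code handles that this sketch glosses over: a java.util.stream.Stream can only be consumed once, which is why the diff collects results into a List ("again") before both caching and re-streaming them.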
@@ -21,22 +21,27 @@ import com.datastax.driver.core.Statement;
|
|||
|
||||
public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation<E>> {
|
||||
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractOperation<E, ?> delegate;
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractOperation<E, ?> delegate;
|
||||
|
||||
public BoundOperation(BoundStatement boundStatement, AbstractOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
public BoundOperation(BoundStatement boundStatement, AbstractOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public E transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
@Override
|
||||
public E transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,31 +15,35 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.BoundStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import java.util.Optional;
|
||||
|
||||
public final class BoundOptionalOperation<E>
|
||||
extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
|
||||
public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
|
||||
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractOptionalOperation<E, ?> delegate;
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractOptionalOperation<E, ?> delegate;
|
||||
|
||||
public BoundOptionalOperation(
|
||||
BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
@Override
|
||||
public Optional<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -15,36 +15,43 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.datastax.driver.core.BoundStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public final class BoundStreamOperation<E>
|
||||
extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractStreamOperation<E, ?> delegate;
|
||||
public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
|
||||
|
||||
public BoundStreamOperation(
|
||||
BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractStreamOperation<E, ?> delegate;
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
return delegate.getStatementCacheKey();
|
||||
}
|
||||
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
return delegate.bindFacetValues();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
@Override
|
||||
public Stream<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Statement buildStatement(boolean cached) {
|
||||
return boundStatement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
}
|
||||
|
|
|
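The Bound*Operation wrappers above exist so a statement prepared once can be bound with new values while reusing the original operation's transform(). A small sketch of that prepare/bind step using only the DataStax 3.x driver; the table and column names are illustrative.

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

class PrepareBindSketch {
    static ResultSet findByName(Session session, String name) {
        PreparedStatement prepared = session.prepare("SELECT * FROM example_table WHERE name = ?");
        BoundStatement bound = prepared.bind(name); // the BoundStatement a Bound*Operation wraps
        return session.execute(bound);              // transform(...) stays on the delegate
    }
}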
@@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
|
|||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.datastax.driver.core.querybuilder.Select;
|
||||
import com.datastax.driver.core.querybuilder.Select.Where;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Filter;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
|
@@ -28,56 +29,53 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class CountOperation extends AbstractFilterOperation<Long, CountOperation> {
|
||||
|
||||
private HelenusEntity entity;
|
||||
private HelenusEntity entity;
|
||||
|
||||
public CountOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
public CountOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
|
||||
public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
super(sessionOperations);
|
||||
this.entity = entity;
|
||||
}
|
||||
public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
super(sessionOperations);
|
||||
this.entity = entity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
filters.forEach(f -> addPropertyNode(f.getNode()));
|
||||
}
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
filters.forEach(f -> addPropertyNode(f.getNode()));
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
|
||||
Select select = QueryBuilder.select().countAll().from(entity.getName().toCql());
|
||||
Select select = QueryBuilder.select().countAll().from(entity.getName().toCql());
|
||||
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
|
||||
Where where = select.where();
|
||||
Where where = select.where();
|
||||
|
||||
for (Filter<?> filter : filters) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
for (Filter<?> filter : filters) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
|
||||
return select;
|
||||
}
|
||||
return select;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long transform(ResultSet resultSet) {
|
||||
return resultSet.one().getLong(0);
|
||||
}
|
||||
@Override
|
||||
public Long transform(ResultSet resultSet) {
|
||||
return resultSet.one().getLong(0);
|
||||
}
|
||||
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can count columns only in single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException("you can count columns only in single entity "
|
||||
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
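For reference, a standalone sketch of the statement CountOperation.buildStatement() assembles when one filter is present; the table and column here are made up.

import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;

class CountSketch {
    public static void main(String[] args) {
        Select select = QueryBuilder.select().countAll().from("timeline");
        select.where(QueryBuilder.eq("user_id", 42));
        // Prints something like: SELECT count(*) FROM timeline WHERE user_id=42;
        System.out.println(select.getQueryString());
    }
}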
@@ -20,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
|
|||
import com.datastax.driver.core.querybuilder.Delete;
|
||||
import com.datastax.driver.core.querybuilder.Delete.Where;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Filter;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
|
@@ -28,100 +29,97 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {
|
||||
|
||||
private HelenusEntity entity;
|
||||
private HelenusEntity entity;
|
||||
|
||||
private boolean ifExists = false;
|
||||
private boolean ifExists = false;
|
||||
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
|
||||
public DeleteOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
public DeleteOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
}
|
||||
|
||||
public DeleteOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
super(sessionOperations);
|
||||
public DeleteOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.entity = entity;
|
||||
}
|
||||
this.entity = entity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
filters.forEach(f -> addPropertyNode(f.getNode()));
|
||||
}
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
filters.forEach(f -> addPropertyNode(f.getNode()));
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
|
||||
Delete delete = QueryBuilder.delete().from(entity.getName().toCql());
|
||||
Delete delete = QueryBuilder.delete().from(entity.getName().toCql());
|
||||
|
||||
if (this.ifExists) {
|
||||
delete.ifExists();
|
||||
}
|
||||
if (this.ifExists) {
|
||||
delete.ifExists();
|
||||
}
|
||||
|
||||
Where where = delete.where();
|
||||
Where where = delete.where();
|
||||
|
||||
for (Filter<?> filter : filters) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
for (Filter<?> filter : filters) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
|
||||
if (ifFilters != null && !ifFilters.isEmpty()) {
|
||||
if (ifFilters != null && !ifFilters.isEmpty()) {
|
||||
|
||||
for (Filter<?> filter : ifFilters) {
|
||||
delete.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
for (Filter<?> filter : ifFilters) {
|
||||
delete.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
|
||||
if (this.ttl != null) {
|
||||
delete.using(QueryBuilder.ttl(this.ttl[0]));
|
||||
}
|
||||
if (this.timestamp != null) {
|
||||
delete.using(QueryBuilder.timestamp(this.timestamp[0]));
|
||||
}
|
||||
if (this.ttl != null) {
|
||||
delete.using(QueryBuilder.ttl(this.ttl[0]));
|
||||
}
|
||||
if (this.timestamp != null) {
|
||||
delete.using(QueryBuilder.timestamp(this.timestamp[0]));
|
||||
}
|
||||
|
||||
return delete;
|
||||
return delete;
|
||||
|
||||
} else {
|
||||
return QueryBuilder.truncate(entity.getName().toCql());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return QueryBuilder.truncate(entity.getName().toCql());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResultSet transform(ResultSet resultSet) {
|
||||
return resultSet;
|
||||
}
|
||||
@Override
|
||||
public ResultSet transform(ResultSet resultSet) {
|
||||
return resultSet;
|
||||
}
|
||||
|
||||
public DeleteOperation ifExists() {
|
||||
this.ifExists = true;
|
||||
return this;
|
||||
}
|
||||
public DeleteOperation ifExists() {
|
||||
this.ifExists = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DeleteOperation usingTtl(int ttl) {
|
||||
this.ttl = new int[1];
|
||||
this.ttl[0] = ttl;
|
||||
return this;
|
||||
}
|
||||
public DeleteOperation usingTtl(int ttl) {
|
||||
this.ttl = new int[1];
|
||||
this.ttl[0] = ttl;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DeleteOperation usingTimestamp(long timestamp) {
|
||||
this.timestamp = new long[1];
|
||||
this.timestamp[0] = timestamp;
|
||||
return this;
|
||||
}
|
||||
public DeleteOperation usingTimestamp(long timestamp) {
|
||||
this.timestamp = new long[1];
|
||||
this.timestamp[0] = timestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can delete rows only in single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException("you can delete rows only in single entity "
|
||||
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
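Likewise, a standalone sketch of the delete that buildStatement() above produces when filters exist (with no filters it falls back to QueryBuilder.truncate()); the names and values are illustrative.

import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.QueryBuilder;

class DeleteSketch {
    public static void main(String[] args) {
        Delete delete = QueryBuilder.delete().from("timeline");
        delete.ifExists();                                    // maps to ifExists()
        delete.where(QueryBuilder.eq("user_id", 42));         // maps to the Filter clauses
        delete.using(QueryBuilder.timestamp(1500000000000L)); // maps to usingTimestamp(...)
        System.out.println(delete.getQueryString());
    }
}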
@@ -15,18 +15,20 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.datastax.driver.core.querybuilder.Insert;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.google.common.base.Joiner;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Getter;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
@@ -38,230 +40,210 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
|
||||
|
||||
private HelenusEntity entity;
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final T pojo;
|
||||
private final Class<?> resultType;
|
||||
private HelenusEntity entity;
|
||||
private boolean ifNotExists;
|
||||
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
|
||||
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final T pojo;
|
||||
private final Class<?> resultType;
|
||||
private boolean ifNotExists;
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = ResultSet.class;
|
||||
}
|
||||
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = ResultSet.class;
|
||||
}
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = resultType;
|
||||
}
|
||||
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = resultType;
|
||||
}
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
|
||||
Set<String> mutations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
T pojo,
|
||||
Set<String> mutations,
|
||||
boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
this.entity = entity;
|
||||
this.pojo = pojo;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.resultType = entity.getMappingInterface();
|
||||
|
||||
this.entity = entity;
|
||||
this.pojo = pojo;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.resultType = entity.getMappingInterface();
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
Set<String> keys = (mutations == null) ? null : mutations;
|
||||
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
Set<String> keys = (mutations == null) ? null : mutations;
|
||||
for (HelenusProperty prop : properties) {
|
||||
boolean addProp = false;
|
||||
|
||||
for (HelenusProperty prop : properties) {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
addProp = true;
|
||||
break;
|
||||
default :
|
||||
addProp = (keys == null || keys.contains(prop.getPropertyName()));
|
||||
}
|
||||
|
||||
if (keys == null || keys.contains(prop.getPropertyName())) {
|
||||
if (addProp) {
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);
|
||||
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);
|
||||
if (value != null) {
|
||||
HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
|
||||
values.add(Fun.Tuple2.of(node, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
|
||||
values.add(Fun.Tuple2.of(node, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
public InsertOperation<T> ifNotExists() {
|
||||
this.ifNotExists = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InsertOperation<T> ifNotExists() {
|
||||
this.ifNotExists = true;
|
||||
return this;
|
||||
}
|
||||
public InsertOperation<T> ifNotExists(boolean enable) {
|
||||
this.ifNotExists = enable;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InsertOperation<T> ifNotExists(boolean enable) {
|
||||
this.ifNotExists = enable;
|
||||
return this;
|
||||
}
|
||||
public <V> InsertOperation<T> value(Getter<V> getter, V val) {
|
||||
|
||||
public <V> InsertOperation<T> value(Getter<V> getter, V val) {
|
||||
Objects.requireNonNull(getter, "getter is empty");
|
||||
|
||||
Objects.requireNonNull(getter, "getter is empty");
|
||||
if (val != null) {
|
||||
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(val, node.getProperty());
|
||||
|
||||
if (val != null) {
|
||||
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(val, node.getProperty());
|
||||
if (value != null) {
|
||||
values.add(Fun.Tuple2.of(node, value));
|
||||
}
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
values.add(Fun.Tuple2.of(node, value));
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
values.forEach(t -> addPropertyNode(t._1));
|
||||
|
||||
values.forEach(t -> addPropertyNode(t._1));
|
||||
if (values.isEmpty())
|
||||
return null;
|
||||
|
||||
if (values.isEmpty()) return null;
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
}
|
||||
Insert insert = QueryBuilder.insertInto(entity.getName().toCql());
|
||||
|
||||
Insert insert = QueryBuilder.insertInto(entity.getName().toCql());
|
||||
if (ifNotExists) {
|
||||
insert.ifNotExists();
|
||||
}
|
||||
|
||||
if (ifNotExists) {
|
||||
insert.ifNotExists();
|
||||
}
|
||||
values.forEach(t -> {
|
||||
insert.value(t._1.getColumnName(), t._2);
|
||||
});
|
||||
|
||||
values.forEach(
|
||||
t -> {
|
||||
insert.value(t._1.getColumnName(), t._2);
|
||||
});
|
||||
if (this.ttl != null) {
|
||||
insert.using(QueryBuilder.ttl(this.ttl[0]));
|
||||
}
|
||||
if (this.timestamp != null) {
|
||||
insert.using(QueryBuilder.timestamp(this.timestamp[0]));
|
||||
}
|
||||
|
||||
if (this.ttl != null) {
|
||||
insert.using(QueryBuilder.ttl(this.ttl[0]));
|
||||
}
|
||||
if (this.timestamp != null) {
|
||||
insert.using(QueryBuilder.timestamp(this.timestamp[0]));
|
||||
}
|
||||
return insert;
|
||||
}
|
||||
|
||||
return insert;
|
||||
}
|
||||
@Override
|
||||
public T transform(ResultSet resultSet) {
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
if (values.size() > 0) {
|
||||
boolean immutable = iface.isAssignableFrom(Drafted.class);
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
|
||||
|
||||
@Override
|
||||
public T transform(ResultSet resultSet) {
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
if (values.size() > 0) {
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
|
||||
// First, add all the inserted values into our new map.
|
||||
values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));
|
||||
|
||||
// First, add all the inserted values into our new map.
|
||||
values.forEach(t -> backingMap.put(t._1.getProperty().getPropertyName(), t._2));
|
||||
// Then, fill in all the rest of the properties.
|
||||
for (HelenusProperty prop : properties) {
|
||||
String key = prop.getPropertyName();
|
||||
if (backingMap.containsKey(key)) {
|
||||
// Some values may need to be converted (e.g. from String to Enum). This is done
|
||||
// within the BeanColumnValueProvider below.
|
||||
Optional<Function<Object, Object>> converter = prop
|
||||
.getReadConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
backingMap.put(key, converter.get().apply(backingMap.get(key)));
|
||||
}
|
||||
} else {
|
||||
// If we started this operation with an instance of this type, use values from
|
||||
// that.
|
||||
if (pojo != null) {
|
||||
backingMap.put(key,
|
||||
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
|
||||
} else {
|
||||
// Otherwise we'll use default values for the property type if available.
|
||||
Class<?> propType = prop.getJavaType();
|
||||
if (propType.isPrimitive()) {
|
||||
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
|
||||
if (type == null) {
|
||||
throw new HelenusException("unknown primitive type " + propType);
|
||||
}
|
||||
backingMap.put(key, type.getDefaultValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then, fill in all the rest of the properties.
|
||||
for (HelenusProperty prop : properties) {
|
||||
String key = prop.getPropertyName();
|
||||
if (backingMap.containsKey(key)) {
|
||||
// Some values may need to be converted (e.g. from String to Enum). This is done
|
||||
// within the BeanColumnValueProvider below.
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getReadConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
backingMap.put(key, converter.get().apply(backingMap.get(key)));
|
||||
}
|
||||
} else {
|
||||
// If we started this operation with an instance of this type, use values from that.
|
||||
if (pojo != null) {
|
||||
backingMap.put(key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
|
||||
} else {
|
||||
// Otherwise we'll use default values for the property type if available.
|
||||
Class<?> propType = prop.getJavaType();
|
||||
if (propType.isPrimitive()) {
|
||||
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(propType);
|
||||
if (type == null) {
|
||||
throw new HelenusException("unknown primitive type " + propType);
|
||||
}
|
||||
backingMap.put(key, type.getDefaultValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Lastly, create a new proxy object for the entity and return the new instance.
|
||||
return (T) Helenus.map(iface, backingMap);
|
||||
}
|
||||
// Oddly, this insert didn't change any value so simply return the pojo.
|
||||
// TODO(gburd): this pojo is the result of a Draft.build() call which will not
|
||||
// preserve object identity (o1 == o2), ... fix me.
|
||||
return (T) pojo;
|
||||
}
|
||||
return (T) resultSet;
|
||||
}
|
||||
|
||||
// Lastly, create a new proxy object for the entity and return the new instance.
|
||||
return (T) Helenus.map(iface, backingMap);
|
||||
}
|
||||
// Oddly, this insert didn't change any value so simply return the pojo.
|
||||
// TODO(gburd): this pojo is the result of a Draft.build() call which will not preserve object identity (o1 == o2), ... fix me.
|
||||
return (T) pojo;
|
||||
}
|
||||
return (T) resultSet;
|
||||
}
|
||||
public InsertOperation<T> usingTtl(int ttl) {
|
||||
this.ttl = new int[1];
|
||||
this.ttl[0] = ttl;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InsertOperation<T> usingTtl(int ttl) {
|
||||
this.ttl = new int[1];
|
||||
this.ttl[0] = ttl;
|
||||
return this;
|
||||
}
|
||||
public InsertOperation<T> usingTimestamp(long timestamp) {
|
||||
this.timestamp = new long[1];
|
||||
this.timestamp[0] = timestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InsertOperation<T> usingTimestamp(long timestamp) {
|
||||
this.timestamp = new long[1];
|
||||
this.timestamp[0] = timestamp;
|
||||
return this;
|
||||
}
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
|
||||
+ " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can insert only single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
List<String> keys = new ArrayList<>(values.size());
|
||||
values.forEach(
|
||||
t -> {
|
||||
HelenusPropertyNode prop = t._1;
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
keys.add(prop.getColumnName() + "==" + t._2.toString());
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
return entity.getName() + ": " + Joiner.on(",").join(keys);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T sync(UnitOfWork uow) {
|
||||
if (uow == null) {
|
||||
return sync();
|
||||
}
|
||||
T result = super.sync(uow);
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
String key = getStatementCacheKey();
|
||||
if (key != null) {
|
||||
Set<Object> set = new HashSet<Object>(1);
|
||||
set.add(result);
|
||||
uow.getCache().put(key, set);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@Override
|
||||
public T sync(UnitOfWork uow) {// throws TimeoutException {
|
||||
if (uow == null) {
|
||||
return sync();
|
||||
}
|
||||
T result = super.sync(uow);
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
updateCache(uow, result, entity.getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
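A standalone sketch of the INSERT that buildStatement() above produces, with ifNotExists() and a TTL applied; the table, columns and values are made up for illustration.

import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;

class InsertSketch {
    public static void main(String[] args) {
        Insert insert = QueryBuilder.insertInto("timeline");
        insert.ifNotExists();                  // ifNotExists()
        insert.value("user_id", 42);           // one entry per (property, value) tuple
        insert.value("message", "hello");
        insert.using(QueryBuilder.ttl(86400)); // usingTtl(86400)
        System.out.println(insert.getQueryString());
    }
}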
@@ -1,79 +1,111 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import brave.Span;
|
||||
import brave.Tracer;
|
||||
import brave.propagation.TraceContext;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import com.codahale.metrics.Meter;
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.ResultSetFuture;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import com.google.common.base.Stopwatch;
|
||||
|
||||
import brave.Span;
|
||||
import brave.Tracer;
|
||||
import brave.propagation.TraceContext;
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public abstract class Operation<E> {

  protected final AbstractSessionOperations sessionOps;
  protected final Meter uowCacheHits;
  protected final Meter uowCacheMiss;
  protected final Timer requestLatency;
  protected final AbstractSessionOperations sessionOps;
  protected final Meter uowCacheHits;
  protected final Meter uowCacheMiss;
  protected final Timer requestLatency;

  Operation(AbstractSessionOperations sessionOperations) {
    this.sessionOps = sessionOperations;
    MetricRegistry metrics = sessionOperations.getMetricRegistry();
    this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
    this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
    this.requestLatency = metrics.timer("net.helenus.request-latency");
  }
  Operation(AbstractSessionOperations sessionOperations) {
    this.sessionOps = sessionOperations;
    MetricRegistry metrics = sessionOperations.getMetricRegistry();
    this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
    this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
    this.requestLatency = metrics.timer("net.helenus.request-latency");
  }
  public ResultSet execute(
      AbstractSessionOperations session,
      UnitOfWork uow,
      TraceContext traceContext,
      boolean showValues,
      boolean cached) {
  public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
      TimeUnit units, boolean showValues, boolean cached) { // throws TimeoutException {

    // Start recording in a Zipkin sub-span our execution time to perform this operation.
    Tracer tracer = session.getZipkinTracer();
    Span span = null;
    if (tracer != null && traceContext != null) {
      span = tracer.newChild(traceContext);
    }
    // Start recording in a Zipkin sub-span our execution time to perform this
    // operation.
    Tracer tracer = session.getZipkinTracer();
    Span span = null;
    if (tracer != null && traceContext != null) {
      span = tracer.newChild(traceContext);
    }

    try {
    try {

      if (span != null) {
        span.name("cassandra");
        span.start();
      }
      if (span != null) {
        span.name("cassandra");
        span.start();
      }

      Statement statement = options(buildStatement(cached));
      ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
      return futureResultSet.get();
      Statement statement = options(buildStatement(cached));
      Stopwatch timer = null;
      if (uow != null) {
        timer = uow.getExecutionTimer();
        timer.start();
      }
      ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
      ResultSet resultSet = futureResultSet.getUninterruptibly(); // TODO(gburd): (timeout, units);

    } catch (InterruptedException | ExecutionException e) {
      if (uow != null)
        timer.stop();

      throw new RuntimeException(e);
      return resultSet;

    } finally {
    } finally {

      if (span != null) {
        span.finish();
      }
    }
  }
      if (span != null) {
        span.finish();
      }
    }
  }
  public Statement options(Statement statement) {
    return statement;
  }
  public Statement options(Statement statement) {
    return statement;
  }

  public Statement buildStatement(boolean cached) {
    return null;
  }
  public Statement buildStatement(boolean cached) {
    return null;
  }

  public List<Facet> getFacets() {
    return null;
  }

  public List<Facet> bindFacetValues() {
    return null;
  }

  public boolean isSessionCacheable() {
    return false;
  }

  public String getStatementCacheKey() {
    return null;
  }
}

@@ -20,27 +20,28 @@ import com.datastax.driver.core.PreparedStatement;
|
||||
public final class PreparedOperation<E> {
|
||||
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractOperation<E, ?> operation;
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractOperation<E, ?> operation;
|
||||
|
||||
public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
|
||||
public BoundOperation<E> bind(Object... params) {
|
||||
public BoundOperation<E> bind(Object... params) {
|
||||
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
|
||||
return new BoundOperation<E>(boundStatement, operation);
|
||||
}
|
||||
return new BoundOperation<E>(boundStatement, operation);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
}
|
||||

@@ -20,28 +20,27 @@ import com.datastax.driver.core.PreparedStatement;
|
||||
public final class PreparedOptionalOperation<E> {
|
||||
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractOptionalOperation<E, ?> operation;
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractOptionalOperation<E, ?> operation;
|
||||
|
||||
public PreparedOptionalOperation(
|
||||
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
|
||||
public BoundOptionalOperation<E> bind(Object... params) {
|
||||
public BoundOptionalOperation<E> bind(Object... params) {
|
||||
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
|
||||
return new BoundOptionalOperation<E>(boundStatement, operation);
|
||||
}
|
||||
return new BoundOptionalOperation<E>(boundStatement, operation);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
}
|
||||

@@ -20,26 +20,25 @@ import com.datastax.driver.core.PreparedStatement;
|
||||
public final class PreparedStreamOperation<E> {
|
||||
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractStreamOperation<E, ?> operation;
|
||||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractStreamOperation<E, ?> operation;
|
||||
|
||||
public PreparedStreamOperation(
|
||||
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
public PreparedStatement getPreparedStatement() {
|
||||
return preparedStatement;
|
||||
}
|
||||
|
||||
public BoundStreamOperation<E> bind(Object... params) {
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
return new BoundStreamOperation<E>(boundStatement, operation);
|
||||
}
|
||||
public BoundStreamOperation<E> bind(Object... params) {
|
||||
BoundStatement boundStatement = preparedStatement.bind(params);
|
||||
return new BoundStreamOperation<E>(boundStatement, operation);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
}
|
||||

@@ -15,40 +15,53 @@
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
public final class SelectFirstOperation<E>
|
||||
extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public SelectFirstOperation(SelectOperation<E> delegate) {
|
||||
super(delegate.sessionOps);
|
||||
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
|
||||
|
||||
this.delegate = delegate;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
private final SelectOperation<E> delegate;
|
||||
|
||||
public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) {
|
||||
return new SelectFirstTransformingOperation<R, E>(delegate, fn);
|
||||
}
|
||||
public SelectFirstOperation(SelectOperation<E> delegate) {
|
||||
super(delegate.sessionOps);
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
return delegate.getStatementCacheKey();
|
||||
}
|
||||
this.delegate = delegate;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) {
|
||||
return new SelectFirstTransformingOperation<R, E>(delegate, fn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).findFirst();
|
||||
}
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
return delegate.getFacets();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
return delegate.bindFacetValues();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<E> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).findFirst();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
}
|
||||

@@ -15,38 +15,48 @@
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class SelectFirstTransformingOperation<R, E>
|
||||
extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
|
||||
extends
|
||||
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
|
||||
public SelectFirstTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
|
||||
super(delegate.sessionOps);
|
||||
public SelectFirstTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
|
||||
super(delegate.sessionOps);
|
||||
|
||||
this.delegate = delegate;
|
||||
this.fn = fn;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
this.delegate = delegate;
|
||||
this.fn = fn;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
return delegate.getStatementCacheKey();
|
||||
}
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
return delegate.bindFacetValues();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<R> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).findFirst().map(fn);
|
||||
}
|
||||
@Override
|
||||
public Optional<R> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).findFirst().map(fn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
}
|
||||

@@ -15,6 +15,14 @@
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Row;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
@@ -23,13 +31,11 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
|
||||
import com.datastax.driver.core.querybuilder.Select.Selection;
|
||||
import com.datastax.driver.core.querybuilder.Select.Where;
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.collect.Iterables;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
@@ -41,283 +47,278 @@ import net.helenus.support.HelenusMappingException;
|
||||
public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
|
||||
|
||||
protected Function<Row, E> rowMapper = null;
|
||||
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
|
||||
private static final Logger LOG = LoggerFactory.getLogger(SelectOperation.class);
|
||||
|
||||
protected List<Ordering> ordering = null;
|
||||
protected Integer limit = null;
|
||||
protected boolean allowFiltering = false;
|
||||
protected String alternateTableName = null;
|
||||
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
|
||||
protected Function<Row, E> rowMapper = null;
|
||||
protected List<Ordering> ordering = null;
|
||||
protected Integer limit = null;
|
||||
protected boolean allowFiltering = false;
|
||||
protected String alternateTableName = null;
|
||||
protected boolean isCacheable = false;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
@SuppressWarnings("unchecked")
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.rowMapper =
|
||||
new Function<Row, E>() {
|
||||
this.rowMapper = new Function<Row, E>() {
|
||||
|
||||
@Override
|
||||
public E apply(Row source) {
|
||||
@Override
|
||||
public E apply(Row source) {
|
||||
|
||||
ColumnValueProvider valueProvider = sessionOps.getValueProvider();
|
||||
Object[] arr = new Object[props.size()];
|
||||
ColumnValueProvider valueProvider = sessionOps.getValueProvider();
|
||||
Object[] arr = new Object[props.size()];
|
||||
|
||||
int i = 0;
|
||||
for (HelenusPropertyNode p : props) {
|
||||
Object value = valueProvider.getColumnValue(source, -1, p.getProperty());
|
||||
arr[i++] = value;
|
||||
}
|
||||
int i = 0;
|
||||
for (HelenusPropertyNode p : props) {
|
||||
Object value = valueProvider.getColumnValue(source, -1, p.getProperty());
|
||||
arr[i++] = value;
|
||||
}
|
||||
|
||||
return (E) Fun.ArrayTuple.of(arr);
|
||||
}
|
||||
};
|
||||
}
|
||||
return (E) Fun.ArrayTuple.of(arr);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
|
||||
super(sessionOperations);
|
||||
super(sessionOperations);
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
}
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
public SelectOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
Function<Row, E> rowMapper) {
|
||||
isCacheable = entity.isCacheable();
|
||||
}
|
||||
|
||||
super(sessionOperations);
|
||||
this.rowMapper = rowMapper;
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
|
||||
Function<Row, E> rowMapper) {
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
}
|
||||
super(sessionOperations);
|
||||
this.rowMapper = rowMapper;
|
||||
|
||||
public SelectOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
Function<Row, E> rowMapper,
|
||||
HelenusPropertyNode... props) {
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
super(sessionOperations);
|
||||
isCacheable = entity.isCacheable();
|
||||
}
|
||||
|
||||
this.rowMapper = rowMapper;
|
||||
Collections.addAll(this.props, props);
|
||||
}
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
|
||||
HelenusPropertyNode... props) {
|
||||
|
||||
public CountOperation count() {
|
||||
super(sessionOperations);
|
||||
|
||||
HelenusEntity entity = null;
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
this.rowMapper = rowMapper;
|
||||
Collections.addAll(this.props, props);
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can count records only from a single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ prop.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
public CountOperation count() {
|
||||
|
||||
return new CountOperation(sessionOps, entity);
|
||||
}
|
||||
HelenusEntity entity = null;
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
|
||||
public <V extends E> SelectOperation<E> from(Class<V> materializedViewClass) {
|
||||
Objects.requireNonNull(materializedViewClass);
|
||||
HelenusEntity entity = Helenus.entity(materializedViewClass);
|
||||
this.alternateTableName = entity.getName().toCql();
|
||||
this.allowFiltering = true;
|
||||
return this;
|
||||
}
|
||||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException("you can count records only from a single entity "
|
||||
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
public SelectOperation<E> from(String alternateTableName) {
|
||||
this.alternateTableName = alternateTableName;
|
||||
return this;
|
||||
}
|
||||
return new CountOperation(sessionOps, entity);
|
||||
}
|
||||
|
||||
public SelectFirstOperation<E> single() {
|
||||
limit(1);
|
||||
return new SelectFirstOperation<E>(this);
|
||||
}
|
||||
public <V extends E> SelectOperation<E> from(Class<V> materializedViewClass) {
|
||||
Objects.requireNonNull(materializedViewClass);
|
||||
HelenusEntity entity = Helenus.entity(materializedViewClass);
|
||||
this.alternateTableName = entity.getName().toCql();
|
||||
this.props.clear();
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
return this;
|
||||
}
|
||||
|
||||
public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) {
|
||||
public SelectFirstOperation<E> single() {
|
||||
limit(1);
|
||||
return new SelectFirstOperation<E>(this);
|
||||
}
|
||||
|
||||
Objects.requireNonNull(entityClass, "entityClass is null");
|
||||
public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) {
|
||||
|
||||
HelenusEntity entity = Helenus.entity(entityClass);
|
||||
Objects.requireNonNull(entityClass, "entityClass is null");
|
||||
|
||||
this.rowMapper = null;
|
||||
HelenusEntity entity = Helenus.entity(entityClass);
|
||||
|
||||
return new SelectTransformingOperation<R, E>(
|
||||
this,
|
||||
(r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
|
||||
return (R) Helenus.map(entityClass, map);
|
||||
});
|
||||
}
|
||||
this.rowMapper = null;
|
||||
|
||||
public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) {
|
||||
return new SelectTransformingOperation<R, E>(this, fn);
|
||||
}
|
||||
return new SelectTransformingOperation<R, E>(this, (r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
|
||||
return (R) Helenus.map(entityClass, map);
|
||||
});
|
||||
}
|
||||
|
||||
public SelectOperation<E> column(Getter<?> getter) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
this.props.add(p);
|
||||
return this;
|
||||
}
|
||||
public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) {
|
||||
return new SelectTransformingOperation<R, E>(this, fn);
|
||||
}
|
||||
|
||||
public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) {
|
||||
getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering());
|
||||
return this;
|
||||
}
|
||||
public SelectOperation<E> column(Getter<?> getter) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
this.props.add(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
public SelectOperation<E> orderBy(Ordered ordered) {
|
||||
getOrCreateOrdering().add(ordered.getOrdering());
|
||||
return this;
|
||||
}
|
||||
public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) {
|
||||
getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering());
|
||||
return this;
|
||||
}
|
||||
|
||||
public SelectOperation<E> limit(Integer limit) {
|
||||
this.limit = limit;
|
||||
return this;
|
||||
}
|
||||
public SelectOperation<E> orderBy(Ordered ordered) {
|
||||
getOrCreateOrdering().add(ordered.getOrdering());
|
||||
return this;
|
||||
}
|
||||
|
||||
public SelectOperation<E> allowFiltering() {
|
||||
this.allowFiltering = true;
|
||||
return this;
|
||||
}
|
||||
public SelectOperation<E> limit(Integer limit) {
|
||||
this.limit = limit;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
List<String> keys = new ArrayList<>(filters.size());
|
||||
HelenusEntity entity = props.get(0).getEntity();
|
||||
public SelectOperation<E> allowFiltering() {
|
||||
this.allowFiltering = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
Filter filter = filters.get(prop.getProperty());
|
||||
if (filter != null) {
|
||||
keys.add(filter.toString());
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (keys.size() > 0) {
|
||||
return entity.getName() + ": " + Joiner.on(",").join(keys);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return isCacheable;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
HelenusEntity entity = props.get(0).getEntity();
|
||||
return entity.getFacets();
|
||||
}
|
||||
|
||||
HelenusEntity entity = null;
|
||||
Selection selection = QueryBuilder.select();
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
HelenusEntity entity = props.get(0).getEntity();
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
String columnName = prop.getColumnName();
|
||||
selection = selection.column(columnName);
|
||||
for (Facet facet : entity.getFacets()) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
Filter filter = filters.get(prop);
|
||||
if (filter != null) {
|
||||
Object[] postulates = filter.postulateValues();
|
||||
for (Object p : postulates) {
|
||||
binder.setValueForProperty(prop, p.toString());
|
||||
}
|
||||
}
|
||||
|
||||
if (prop.getProperty().caseSensitiveIndex()) {
|
||||
allowFiltering = true;
|
||||
}
|
||||
});
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
} else {
|
||||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
return boundFacets;
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can select columns only from a single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ prop.getEntity().getMappingInterface());
|
||||
}
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
|
||||
if (cached) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
break;
|
||||
default:
|
||||
if (entity.equals(prop.getEntity())) {
|
||||
if (prop.getNext().isPresent()) {
|
||||
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
|
||||
}
|
||||
if (!prop.getProperty().getDataType().isCollectionType()) {
|
||||
selection.writeTime(columnName).as(columnName + "_writeTime");
|
||||
selection.ttl(columnName).as(columnName + "_ttl");
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
HelenusEntity entity = null;
|
||||
Selection selection = QueryBuilder.select();
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("no entity or table to select data");
|
||||
}
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
String columnName = prop.getColumnName();
|
||||
selection = selection.column(columnName);
|
||||
|
||||
String tableName = alternateTableName == null ? entity.getName().toCql() : alternateTableName;
|
||||
Select select = selection.from(tableName);
|
||||
if (prop.getProperty().caseSensitiveIndex()) {
|
||||
allowFiltering = true;
|
||||
}
|
||||
|
||||
if (ordering != null && !ordering.isEmpty()) {
|
||||
select.orderBy(ordering.toArray(new Ordering[ordering.size()]));
|
||||
}
|
||||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException("you can select columns only from a single entity "
|
||||
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
|
||||
}
|
||||
|
||||
if (limit != null) {
|
||||
select.limit(limit);
|
||||
}
|
||||
if (cached) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
break;
|
||||
default :
|
||||
if (entity.equals(prop.getEntity())) {
|
||||
if (prop.getNext().isPresent()) {
|
||||
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
|
||||
}
|
||||
if (!prop.getProperty().getDataType().isCollectionType()) {
|
||||
selection.writeTime(columnName).as(columnName + "_writeTime");
|
||||
selection.ttl(columnName).as(columnName + "_ttl");
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("no entity or table to select data");
|
||||
}
|
||||
|
||||
Where where = select.where();
|
||||
String tableName = alternateTableName == null ? entity.getName().toCql() : alternateTableName;
|
||||
Select select = selection.from(tableName);
|
||||
|
||||
for (Filter<?> filter : filters.values()) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
if (ordering != null && !ordering.isEmpty()) {
|
||||
select.orderBy(ordering.toArray(new Ordering[ordering.size()]));
|
||||
}
|
||||
|
||||
if (ifFilters != null && !ifFilters.isEmpty()) {
|
||||
logger.error(
|
||||
"onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
|
||||
}
|
||||
if (limit != null) {
|
||||
select.limit(limit);
|
||||
}
|
||||
|
||||
if (allowFiltering) {
|
||||
select.allowFiltering();
|
||||
}
|
||||
if (filters != null && !filters.isEmpty()) {
|
||||
|
||||
return select;
|
||||
}
|
||||
Where where = select.where();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Stream<E> transform(ResultSet resultSet) {
|
||||
if (rowMapper != null) {
|
||||
return StreamSupport.stream(
|
||||
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
|
||||
.map(rowMapper);
|
||||
} else {
|
||||
return (Stream<E>)
|
||||
StreamSupport.stream(
|
||||
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
|
||||
false);
|
||||
}
|
||||
}
|
||||
for (Filter<?> filter : filters.values()) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
}
|
||||
}
|
||||
|
||||
private List<Ordering> getOrCreateOrdering() {
|
||||
if (ordering == null) {
|
||||
ordering = new ArrayList<Ordering>();
|
||||
}
|
||||
return ordering;
|
||||
}
|
||||
if (ifFilters != null && !ifFilters.isEmpty()) {
|
||||
LOG.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
|
||||
}
|
||||
|
||||
if (allowFiltering) {
|
||||
select.allowFiltering();
|
||||
}
|
||||
|
||||
return select;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Stream<E> transform(ResultSet resultSet) {
|
||||
if (rowMapper != null) {
|
||||
return StreamSupport
|
||||
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
|
||||
.map(rowMapper);
|
||||
} else {
|
||||
return (Stream<E>) StreamSupport
|
||||
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
|
||||
}
|
||||
}
|
||||
|
||||
private List<Ordering> getOrCreateOrdering() {
|
||||
if (ordering == null) {
|
||||
ordering = new ArrayList<Ordering>();
|
||||
}
|
||||
return ordering;
|
||||
}
|
||||
}
|
||||

@@ -15,38 +15,48 @@
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class SelectTransformingOperation<R, E>
|
||||
extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
|
||||
extends
|
||||
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
|
||||
public SelectTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
|
||||
super(delegate.sessionOps);
|
||||
public SelectTransformingOperation(SelectOperation<E> delegate, Function<E, R> fn) {
|
||||
super(delegate.sessionOps);
|
||||
|
||||
this.delegate = delegate;
|
||||
this.fn = fn;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
this.delegate = delegate;
|
||||
this.fn = fn;
|
||||
this.filters = delegate.filters;
|
||||
this.ifFilters = delegate.ifFilters;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStatementCacheKey() {
|
||||
return delegate.getStatementCacheKey();
|
||||
}
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
return delegate.bindFacetValues();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
return delegate.getFacets();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<R> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).map(fn);
|
||||
}
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
return delegate.buildStatement(cached);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<R> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).map(fn);
|
||||
}
|
||||
}
|
||||

@@ -15,13 +15,15 @@
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.Assignment;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.datastax.driver.core.querybuilder.Update;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
@@ -32,565 +34,549 @@ import net.helenus.support.Immutables;
|
||||
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
|
||||
|
||||
private HelenusEntity entity = null;
|
||||
private final List<Assignment> assignments = new ArrayList<Assignment>();
|
||||
private final AbstractEntityDraft<E> draft;
|
||||
private final Map<String, Object> draftMap;
|
||||
private HelenusEntity entity = null;
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
|
||||
private final List<Assignment> assignments = new ArrayList<Assignment>();
|
||||
private final AbstractEntityDraft<E> draft;
|
||||
private final Map<String, Object> draftMap;
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
}
|
||||
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
|
||||
super(sessionOperations);
|
||||
this.draft = draft;
|
||||
this.draftMap = draft.toMap();
|
||||
}
|
||||
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
}
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
|
||||
public UpdateOperation(
|
||||
AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
|
||||
super(sessionOperations);
|
||||
this.draft = draft;
|
||||
this.draftMap = draft.toMap();
|
||||
}
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
|
||||
public UpdateOperation(
|
||||
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
addPropertyNode(p);
|
||||
}
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
public <V> UpdateOperation<E> set(Getter<V> getter, V v) {
|
||||
Objects.requireNonNull(getter, "getter is empty");
|
||||
|
||||
addPropertyNode(p);
|
||||
}
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
|
||||
public <V> UpdateOperation<E> set(Getter<V> getter, V v) {
|
||||
Objects.requireNonNull(getter, "getter is empty");
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
addPropertyNode(p);
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
return this;
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
/*
|
||||
*
|
||||
*
|
||||
* COUNTER
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
return this;
|
||||
}
|
||||
public <V> UpdateOperation<E> increment(Getter<V> counterGetter) {
|
||||
return increment(counterGetter, 1L);
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
*
|
||||
* COUNTER
|
||||
*
|
||||
*
|
||||
*/
|
||||
public <V> UpdateOperation<E> increment(Getter<V> counterGetter, long delta) {
|
||||
|
||||
public <V> UpdateOperation<E> increment(Getter<V> counterGetter) {
|
||||
return increment(counterGetter, 1L);
|
||||
}
|
||||
Objects.requireNonNull(counterGetter, "counterGetter is empty");
|
||||
|
||||
public <V> UpdateOperation<E> increment(Getter<V> counterGetter, long delta) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
Objects.requireNonNull(counterGetter, "counterGetter is empty");
|
||||
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
}
|
||||
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter) {
|
||||
return decrement(counterGetter, 1L);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter, long delta) {
|
||||
|
||||
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter) {
|
||||
return decrement(counterGetter, 1L);
|
||||
}
|
||||
Objects.requireNonNull(counterGetter, "counterGetter is empty");
|
||||
|
||||
public <V> UpdateOperation<E> decrement(Getter<V> counterGetter, long delta) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
Objects.requireNonNull(counterGetter, "counterGetter is empty");
|
||||
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
}
|
||||
/*
|
||||
*
|
||||
*
|
||||
* LIST
|
||||
*
|
||||
*/
|
||||
|
||||
return this;
|
||||
}
|
||||
public <V> UpdateOperation<E> prepend(Getter<List<V>> listGetter, V value) {
|
||||
|
||||
/*
|
||||
*
|
||||
*
|
||||
* LIST
|
||||
*
|
||||
*/
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> prepend(Getter<List<V>> listGetter, V value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
}
|
||||
public <V> UpdateOperation<E> prependAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> prependAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
}
|
||||
public <V> UpdateOperation<E> setIdx(Getter<List<V>> listGetter, int idx, V value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> setIdx(Getter<List<V>> listGetter, int idx, V value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
if (idx < 0) {
|
||||
list.add(0, value);
|
||||
} else if (idx > list.size()) {
|
||||
list.add(list.size(), value);
|
||||
} else {
|
||||
list.add(idx, value);
|
||||
}
|
||||
list.add(0, value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
if (idx < 0) {
|
||||
list.add(0, value);
|
||||
} else if (idx > list.size()) {
|
||||
list.add(list.size(), value);
|
||||
} else {
|
||||
list.add(idx, value);
|
||||
}
|
||||
list.add(0, value);
|
||||
}
|
||||
public <V> UpdateOperation<E> append(Getter<List<V>> listGetter, V value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> append(Getter<List<V>> listGetter, V value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
}
|
||||
public <V> UpdateOperation<E> appendAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> appendAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
}
|
||||
public <V> UpdateOperation<E> discard(Getter<List<V>> listGetter, V value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> discard(Getter<List<V>> listGetter, V value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
}
|
||||
public <V> UpdateOperation<E> discardAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
|
||||
return this;
|
||||
}
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
|
||||
public <V> UpdateOperation<E> discardAll(Getter<List<V>> listGetter, List<V> value) {
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
Objects.requireNonNull(listGetter, "listGetter is empty");
|
||||
Objects.requireNonNull(value, "value is empty");
|
||||
assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
addPropertyNode(p);
|
||||
|
||||
assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
}
|
||||
private Object prepareSingleListValue(HelenusPropertyNode p, Object value) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
return this;
|
||||
}
|
||||
Object valueObj = value;
|
||||
|
||||
private Object prepareSingleListValue(HelenusPropertyNode p, Object value) {
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
|
||||
valueObj = convertedList.get(0);
|
||||
}
|
||||
|
||||
Object valueObj = value;
|
||||
return valueObj;
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
|
||||
valueObj = convertedList.get(0);
|
||||
}
|
||||
private List prepareListValue(HelenusPropertyNode p, List value) {
|
||||
|
||||
return valueObj;
|
||||
}
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
private List prepareListValue(HelenusPropertyNode p, List value) {
|
||||
List valueObj = value;
|
||||
|
||||
HelenusProperty prop = p.getProperty();
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (List) converter.get().apply(value);
|
||||
}
|
||||
|
||||
List valueObj = value;
|
||||
return valueObj;
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (List) converter.get().apply(value);
|
||||
}
|
    return valueObj;
  }

  /*
   *
   *
   * SET
   *
   *
   */

  public <V> UpdateOperation<E> add(Getter<Set<V>> setGetter, V value) {

    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);

    assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.add(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> addAll(Getter<Set<V>> setGetter, Set<V> value) {

    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);

    assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.addAll(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> remove(Getter<Set<V>> setGetter, V value) {

    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);

    assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.remove(value);
    }

    return this;
  }

  public <V> UpdateOperation<E> removeAll(Getter<Set<V>> setGetter, Set<V> value) {

    Objects.requireNonNull(setGetter, "setGetter is empty");
    Objects.requireNonNull(value, "value is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);

    assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.removeAll(value);
    }

    return this;
  }

  private Object prepareSingleSetValue(HelenusPropertyNode p, Object value) {

    HelenusProperty prop = p.getProperty();
    Object valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
      valueObj = convertedSet.iterator().next();
    }

    return valueObj;
  }

  private Set prepareSetValue(HelenusPropertyNode p, Set value) {

    HelenusProperty prop = p.getProperty();
    Set valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (Set) converter.get().apply(value);
    }

    return valueObj;
  }

  /*
   *
   *
   * MAP
   *
   *
   */

  public <K, V> UpdateOperation<E> put(Getter<Map<K, V>> mapGetter, K key, V value) {

    Objects.requireNonNull(mapGetter, "mapGetter is empty");
    Objects.requireNonNull(key, "key is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
      for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
        assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
      }
    } else {
      assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
    }

    return this;
  }

  public <K, V> UpdateOperation<E> putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {

    Objects.requireNonNull(mapGetter, "mapGetter is empty");
    Objects.requireNonNull(map, "map is empty");

    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map convertedMap = (Map) converter.get().apply(map);
      assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
    } else {
      assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
    }

    return this;
  }

  @Override
  public BuiltStatement buildStatement(boolean cached) {

    if (entity == null) {
      throw new HelenusMappingException("empty update operation");
    }

    Update update = QueryBuilder.update(entity.getName().toCql());

    for (Assignment assignment : assignments) {
      update.with(assignment);
    }

    if (filters != null && !filters.isEmpty()) {
      for (Filter<?> filter : filters) {
        update.where(filter.getClause(sessionOps.getValuePreparer()));
      }
    }

    if (ifFilters != null && !ifFilters.isEmpty()) {
      for (Filter<?> filter : ifFilters) {
        update.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
      }
    }

    if (this.ttl != null) {
      update.using(QueryBuilder.ttl(this.ttl[0]));
    }

    if (this.timestamp != null) {
      update.using(QueryBuilder.timestamp(this.timestamp[0]));
    }

    return update;
  }

  @Override
  public E transform(ResultSet resultSet) {
    if (draft != null) {
      return Helenus.map(draft.getEntityClass(), draft.toMap(draftMap));
    } else {
      return (E) resultSet;
    }
  }

  public UpdateOperation<E> usingTtl(int ttl) {
    this.ttl = new int[1];
    this.ttl[0] = ttl;
    return this;
  }

  public UpdateOperation<E> usingTimestamp(long timestamp) {
    this.timestamp = new long[1];
    this.timestamp[0] = timestamp;
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can update columns only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
    }
  }

  @Override
  public E sync(UnitOfWork uow) { // throws TimeoutException
    if (uow == null) {
      return sync();
    }
    E result = super.sync(uow);
    // TODO(gburd): Only drafted entity objects are updated in the cache at this time.
    if (draft != null) {
      updateCache(uow, result, getFacets());
    }
    return result;
  }
}
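A minimal usage sketch of the collection mutators and write options above. The entity (`Timeline`), its getters, the `session.update()` entry point, `where(...)`, and `Query.eq(...)` are assumptions for illustration only and are not part of this diff; only `add`, `put`, `usingTtl`, `usingTimestamp`, and `sync` are taken from the code shown here.

  // Sketch only: Timeline, session, and the where/Query helpers are hypothetical.
  Timeline timeline = Helenus.dsl(Timeline.class);

  session.update()
      .add(timeline::tags, "breaking")          // SET tags = tags + {'breaking'}
      .put(timeline::properties, "lang", "en")  // SET properties['lang'] = 'en'
      .usingTtl(86400)                          // USING TTL 86400
      .usingTimestamp(System.currentTimeMillis() * 1000L)
      .where(timeline::id, Query.eq(id))        // assumed filter API, shown for completeness
      .sync();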
@@ -19,41 +19,34 @@ import java.util.HashMap;
import java.util.Map;

public enum DefaultPrimitiveTypes {

  BOOLEAN(boolean.class, false),
  BYTE(byte.class, (byte) 0x0),
  CHAR(char.class, (char) 0x0),
  SHORT(short.class, (short) 0),
  INT(int.class, 0),
  LONG(long.class, 0L),
  FLOAT(float.class, 0.0f),
  DOUBLE(double.class, 0.0);

  private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();

  static {
    for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
      map.put(type.getPrimitiveClass(), type);
    }
  }

  private final Class<?> primitiveClass;
  private final Object defaultValue;

  private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
    this.primitiveClass = primitiveClass;
    this.defaultValue = defaultValue;
  }

  public static DefaultPrimitiveTypes lookup(Class<?> primitiveClass) {
    return map.get(primitiveClass);
  }

  public Class<?> getPrimitiveClass() {
    return primitiveClass;
  }

  public Object getDefaultValue() {
    return defaultValue;
  }
}
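This enum is what lets proxied getters return something sensible when a primitive-returning method has no value in the backing map (see MapperInvocationHandler further down). A quick illustration using only the enum itself:

  // For a getter declared as `int count()` whose backing map holds no value,
  // the invocation handler falls back to the primitive's default:
  DefaultPrimitiveTypes intType = DefaultPrimitiveTypes.lookup(int.class);
  Object fallback = (intType != null) ? intType.getDefaultValue() : null;
  System.out.println(fallback);                                                   // 0
  System.out.println(DefaultPrimitiveTypes.lookup(boolean.class).getDefaultValue()); // false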
@@ -1,11 +1,25 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.reflect;

import java.util.Set;

public interface Drafted<T> extends MapExportable {

  Set<String> mutated();

  T build();
}
@@ -16,17 +16,18 @@
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;

import net.helenus.mapping.HelenusEntity;

public interface DslExportable {

  String GET_ENTITY_METHOD = "getHelenusMappingEntity";
  String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
  String SET_METADATA_METHOD = "setCassandraMetadataForHelenusSession";

  HelenusEntity getHelenusMappingEntity();

  HelenusPropertyNode getParentDslHelenusPropertyNode();

  void setCassandraMetadataForHelenusSession(Metadata metadata);
}
@@ -15,13 +15,15 @@
 */
package net.helenus.core.reflect;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.datastax.driver.core.*;

import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;

@@ -34,180 +36,162 @@ import net.helenus.support.HelenusException;

public class DslInvocationHandler<E> implements InvocationHandler {

  private HelenusEntity entity = null;
  private Metadata metadata = null;

  private final Class<E> iface;
  private final ClassLoader classLoader;
  private final Optional<HelenusPropertyNode> parent;
  private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
  private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
  private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();

  public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
      Metadata metadata) {

    this.metadata = metadata;
    this.parent = parent;
    this.iface = iface;
    this.classLoader = classLoader;
  }

  public void setCassandraMetadataForHelenusSession(Metadata metadata) {
    if (metadata != null) {
      this.metadata = metadata;
      entity = init(metadata);
    }
  }

  private HelenusEntity init(Metadata metadata) {
    HelenusEntity entity = new HelenusMappingEntity(iface, metadata);

    for (HelenusProperty prop : entity.getOrderedProperties()) {

      map.put(prop.getGetterMethod(), prop);

      AbstractDataType type = prop.getDataType();
      Class<?> javaType = prop.getJavaType();

      if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {

        Object childDsl = Helenus.dsl(javaType, classLoader,
            Optional.of(new HelenusPropertyNode(prop, parent)), metadata);

        udtMap.put(prop.getGetterMethod(), childDsl);
      }

      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;

        if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {

          Object childDsl = Helenus.dsl(javaType, classLoader,
              Optional.of(new HelenusPropertyNode(prop, parent)), metadata);

          tupleMap.put(prop.getGetterMethod(), childDsl);
        }
      }
    }

    return entity;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

    HelenusEntity entity = this.entity;
    String methodName = method.getName();

    if ("equals".equals(methodName) && method.getParameterCount() == 1) {
      Object otherObj = args[0];
      if (otherObj == null) {
        return false;
      }
      if (Proxy.isProxyClass(otherObj.getClass())) {
        return this == Proxy.getInvocationHandler(otherObj);
      }
      return false;
    }

    if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
      if (metadata == null) {
        this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
      }
      return null;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

    if ("hashCode".equals(methodName)) {
      return hashCode();
    }

    if (DslExportable.GET_PARENT_METHOD.equals(methodName)) {
      return parent.get();
    }

    if (entity == null) {
      entity = init(metadata);
    }

    if ("toString".equals(methodName)) {
      return entity.toString();
    }

    if (DslExportable.GET_ENTITY_METHOD.equals(methodName)) {
      return entity;
    }

    HelenusProperty prop = map.get(method);
    if (prop == null) {
      prop = entity.getProperty(methodName);
    }

    if (prop != null) {

      AbstractDataType type = prop.getDataType();

      if (type instanceof UDTDataType) {

        Object childDsl = udtMap.get(method);

        if (childDsl != null) {
          return childDsl;
        }
      }

      if (type instanceof DTDataType) {
        DTDataType dataType = (DTDataType) type;
        DataType dt = dataType.getDataType();

        switch (dt.getName()) {
          case TUPLE:
            Object childDsl = tupleMap.get(method);

            if (childDsl != null) {
              return childDsl;
            }

            break;

          case SET:
            return new SetDsl(new HelenusPropertyNode(prop, parent));

          case LIST:
            return new ListDsl(new HelenusPropertyNode(prop, parent));

          case MAP:
            return new MapDsl(new HelenusPropertyNode(prop, parent));

          default:
            break;
        }
      }

      throw new DslPropertyException(new HelenusPropertyNode(prop, parent));
    }

    throw new HelenusException("invalid method call " + method);
  }
}
@@ -19,7 +19,9 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;

import javax.validation.ConstraintValidator;

import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;

@@ -27,79 +29,79 @@ import net.helenus.support.HelenusMappingException;

public final class HelenusNamedProperty implements HelenusProperty {

  private final String name;

  public HelenusNamedProperty(String name) {
    this.name = name;
  }

  @Override
  public HelenusEntity getEntity() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public String getPropertyName() {
    return name;
  }

  @Override
  public Method getGetterMethod() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public IdentityName getColumnName() {
    return IdentityName.of(name, false);
  }

  @Override
  public Optional<IdentityName> getIndexName() {
    return Optional.empty();
  }

  @Override
  public boolean caseSensitiveIndex() {
    return false;
  }

  @Override
  public Class<?> getJavaType() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public AbstractDataType getDataType() {
    throw new HelenusMappingException("will never called");
  }

  @Override
  public ColumnType getColumnType() {
    return ColumnType.COLUMN;
  }

  @Override
  public int getOrdinal() {
    return 0;
  }

  @Override
  public OrderingDirection getOrdering() {
    return OrderingDirection.ASC;
  }

  @Override
  public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
    return Optional.empty();
  }

  @Override
  public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
    return Optional.empty();
  }

  @Override
  public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
    return MappingUtil.EMPTY_VALIDATORS;
  }
}
@@ -17,89 +17,90 @@ package net.helenus.core.reflect;

import java.util.*;
import java.util.stream.Collectors;

import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;

public final class HelenusPropertyNode implements Iterable<HelenusProperty> {

  private final HelenusProperty prop;
  private final Optional<HelenusPropertyNode> next;

  public HelenusPropertyNode(HelenusProperty prop, Optional<HelenusPropertyNode> next) {
    this.prop = prop;
    this.next = next;
  }

  public String getColumnName() {
    if (next.isPresent()) {

      List<String> columnNames = new ArrayList<String>();
      for (HelenusProperty p : this) {
        columnNames.add(p.getColumnName().toCql(true));
      }
      Collections.reverse(columnNames);

      if (prop instanceof HelenusNamedProperty) {
        int size = columnNames.size();
        StringBuilder str = new StringBuilder();
        for (int i = 0; i != size - 1; ++i) {
          if (str.length() != 0) {
            str.append(".");
          }
          str.append(columnNames.get(i));
        }
        str.append("[").append(columnNames.get(size - 1)).append("]");
        return str.toString();
      } else {
        return columnNames.stream().collect(Collectors.joining("."));
      }
    } else {
      return prop.getColumnName().toCql();
    }
  }

  public HelenusEntity getEntity() {
    if (next.isPresent()) {
      HelenusProperty last = prop;
      for (HelenusProperty p : this) {
        last = p;
      }
      return last.getEntity();
    } else {
      return prop.getEntity();
    }
  }

  public HelenusProperty getProperty() {
    return prop;
  }

  public Optional<HelenusPropertyNode> getNext() {
    return next;
  }

  public Iterator<HelenusProperty> iterator() {
    return new PropertyNodeIterator(Optional.of(this));
  }

  private static class PropertyNodeIterator implements Iterator<HelenusProperty> {

    private Optional<HelenusPropertyNode> next;

    public PropertyNodeIterator(Optional<HelenusPropertyNode> next) {
      this.next = next;
    }

    @Override
    public boolean hasNext() {
      return next.isPresent();
    }

    @Override
    public HelenusProperty next() {
      HelenusPropertyNode node = next.get();
      next = node.next;
      return node.prop;
    }
  }
}
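A small sketch of how nested nodes collapse into a single column path. The property names here are made up, and the exact quoting depends on IdentityName.toCql, so treat the rendered strings as approximate rather than exact output:

  // Hypothetical nesting: a map column "info" dereferenced by the key "age".
  HelenusPropertyNode infoNode = new HelenusPropertyNode(new HelenusNamedProperty("info"), Optional.empty());
  HelenusPropertyNode ageNode = new HelenusPropertyNode(new HelenusNamedProperty("age"), Optional.of(infoNode));

  // The leaf is a HelenusNamedProperty, so the path uses bracket syntax, roughly "info[age]";
  // a chain of ordinary mapped properties would instead be joined with dots ("address.street").
  System.out.println(ageNode.getColumnName());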
@@ -16,164 +16,165 @@
package net.helenus.core.reflect;

import java.util.*;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

public final class ListDsl<V> implements List<V> {

  private final HelenusPropertyNode parent;

  public ListDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override
  public V get(int index) {
    HelenusProperty prop = new HelenusNamedProperty(Integer.toString(index));
    throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
  }

  @Override
  public int size() {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public boolean isEmpty() {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean contains(Object o) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public Iterator<V> iterator() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public Object[] toArray() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public <T> T[] toArray(T[] a) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public boolean add(V e) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean remove(Object o) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean containsAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean addAll(Collection<? extends V> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean addAll(int index, Collection<? extends V> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean removeAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean retainAll(Collection<?> c) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public void clear() {
    throwShouldNeverCall();
  }

  @Override
  public V set(int index, V element) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public void add(int index, V element) {
    throwShouldNeverCall();
  }

  @Override
  public V remove(int index) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public int indexOf(Object o) {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public int lastIndexOf(Object o) {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public ListIterator<V> listIterator() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public ListIterator<V> listIterator(int index) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public List<V> subList(int fromIndex, int toIndex) {
    throwShouldNeverCall();
    return null;
  }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "ListDsl";
  }
}
@@ -19,98 +19,99 @@ import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

public final class MapDsl<K, V> implements Map<K, V> {

  private final HelenusPropertyNode parent;

  public MapDsl(HelenusPropertyNode parent) {
    this.parent = parent;
  }

  public HelenusPropertyNode getParent() {
    return parent;
  }

  @Override
  public V get(Object key) {
    HelenusProperty prop = new HelenusNamedProperty(key.toString());
    throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
  }

  @Override
  public int size() {
    throwShouldNeverCall();
    return 0;
  }

  @Override
  public boolean isEmpty() {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean containsKey(Object key) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public boolean containsValue(Object value) {
    throwShouldNeverCall();
    return false;
  }

  @Override
  public V put(K key, V value) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public V remove(Object key) {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public void putAll(Map<? extends K, ? extends V> m) {
    throwShouldNeverCall();
  }

  @Override
  public void clear() {
    throwShouldNeverCall();
  }

  @Override
  public Set<K> keySet() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public Collection<V> values() {
    throwShouldNeverCall();
    return null;
  }

  @Override
  public Set<java.util.Map.Entry<K, V>> entrySet() {
    throwShouldNeverCall();
    return null;
  }

  private void throwShouldNeverCall() {
    throw new HelenusMappingException("should be never called");
  }

  @Override
  public String toString() {
    return "MapDsl";
  }
}
@@ -19,7 +19,7 @@ import java.util.Map;

public interface MapExportable {

  public static final String TO_MAP_METHOD = "toMap";

  Map<String, Object> toMap();
}
@@ -23,106 +23,113 @@ import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.Map;

import net.helenus.core.Helenus;
import net.helenus.mapping.annotation.Transient;
import net.helenus.support.HelenusException;

public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
  private static final long serialVersionUID = -7044209982830584984L;

  private final Map<String, Object> src;
  private final Class<E> iface;

  public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
    this.src = src;
    this.iface = iface;
  }

  private Object invokeDefault(Object proxy, Method method, Object[] args) throws Throwable {
    // NOTE: This is reflection magic to invoke (non-recursively) a default method
    // implemented on an interface that we've proxied (in ReflectionDslInstantiator).
    // I found the answer in this article:
    // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/

    // First, we need an instance of a private inner-class found in MethodHandles.
    Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
        int.class);
    constructor.setAccessible(true);

    // Now we can look up and invoke (invokespecial-style) the default method on the interface class.
    final Class<?> declaringClass = method.getDeclaringClass();
    Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
        .unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
    return result;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

    // Transient, default methods should simply be invoked as-is.
    if (method.isDefault() && method.getDeclaredAnnotation(Transient.class) != null) {
      return invokeDefault(proxy, method, args);
    }

    String methodName = method.getName();

    if ("equals".equals(methodName) && method.getParameterCount() == 1) {
      Object otherObj = args[0];
      if (otherObj == null) {
        return false;
      }
      if (Proxy.isProxyClass(otherObj.getClass())) {
        if (this == Proxy.getInvocationHandler(otherObj)) {
          return true;
        }
      }
      if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
        return true;
      }
      return false;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

    if ("hashCode".equals(methodName)) {
      return hashCode();
    }

    if ("toString".equals(methodName)) {
      return iface.getSimpleName() + ": " + src.toString();
    }

    if ("dsl".equals(methodName)) {
      return Helenus.dsl(iface);
    }

    if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
      return Collections.unmodifiableMap(src);
    }

    Object value = src.get(methodName);

    Class<?> returnType = method.getReturnType();

    if (value == null) {

      // Default implementations of non-Transient methods in entities are the default
      // value when the map contains 'null'.
      if (method.isDefault()) {
        return invokeDefault(proxy, method, args);
      }

      // Otherwise, if the return type of the method is a primitive Java type then
      // we'll return the standard default values to avoid a NPE in user code.
      if (returnType.isPrimitive()) {
        DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
        if (type == null) {
          throw new HelenusException("unknown primitive type " + returnType);
        }
        return type.getDefaultValue();
      }
    }

    return value;
  }
}
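The invokeDefault trick above is the usual Java 8 workaround for calling a default interface method from a dynamic proxy without recursing back into the handler. Below is a self-contained illustration of the same MethodHandles.Lookup pattern, independent of any Helenus types; the Greeter interface is invented for the demo, and on Java 9+ the reflective access to the Lookup constructor may be restricted, so treat this as the Java 8 shape used in this codebase.

  import java.lang.invoke.MethodHandles;
  import java.lang.reflect.Constructor;
  import java.lang.reflect.InvocationHandler;
  import java.lang.reflect.Proxy;

  public class DefaultMethodProxyDemo {

    interface Greeter {
      default String greet(String name) {
        return "hello, " + name;
      }
    }

    public static void main(String[] args) throws Throwable {
      InvocationHandler handler = (proxy, method, methodArgs) -> {
        if (method.isDefault()) {
          // Same pattern as invokeDefault(): grab the package-private Lookup constructor
          // so unreflectSpecial() is permitted on the declaring interface (Java 8).
          Constructor<MethodHandles.Lookup> ctor =
              MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
          ctor.setAccessible(true);
          Class<?> declaringClass = method.getDeclaringClass();
          return ctor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
              .unreflectSpecial(method, declaringClass)
              .bindTo(proxy)
              .invokeWithArguments(methodArgs);
        }
        throw new UnsupportedOperationException(method.toString());
      };

      Greeter g = (Greeter) Proxy.newProxyInstance(
          Greeter.class.getClassLoader(), new Class<?>[]{Greeter.class}, handler);
      System.out.println(g.greet("helenus")); // prints: hello, helenus
    }
  }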
@@ -15,25 +15,22 @@
 */
package net.helenus.core.reflect;

import java.lang.reflect.Proxy;
import java.util.Optional;

import com.datastax.driver.core.Metadata;

import net.helenus.core.DslInstantiator;

public enum ReflectionDslInstantiator implements DslInstantiator {
  INSTANCE;

  @Override
  @SuppressWarnings("unchecked")
  public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
      Metadata metadata) {
    DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
    return proxy;
  }
}
@@ -19,14 +19,15 @@ import net.helenus.support.HelenusMappingException;

public final class ReflectionInstantiator {

	private ReflectionInstantiator() {
	}

	public static <T> T instantiateClass(Class<T> clazz) {

		try {
			return clazz.newInstance();
		} catch (InstantiationException | IllegalAccessException e) {
			throw new HelenusMappingException("invalid class " + clazz, e);
		}
	}
}
@@ -17,18 +17,18 @@ package net.helenus.core.reflect;

import java.lang.reflect.Proxy;
import java.util.Map;

import net.helenus.core.MapperInstantiator;

public enum ReflectionMapperInstantiator implements MapperInstantiator {
	INSTANCE;

	@Override
	@SuppressWarnings("unchecked")
	public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {

		MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
		E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
		return proxy;
	}
}
@@ -18,103 +18,104 @@ package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

import net.helenus.support.HelenusMappingException;

public final class SetDsl<V> implements Set<V> {

	private final HelenusPropertyNode parent;

	public SetDsl(HelenusPropertyNode parent) {
		this.parent = parent;
	}

	public HelenusPropertyNode getParent() {
		return parent;
	}

	@Override
	public int size() {
		throwShouldNeverCall();
		return 0;
	}

	@Override
	public boolean isEmpty() {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean contains(Object o) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public Iterator<V> iterator() {
		throwShouldNeverCall();
		return null;
	}

	@Override
	public Object[] toArray() {
		throwShouldNeverCall();
		return null;
	}

	@Override
	public <T> T[] toArray(T[] a) {
		throwShouldNeverCall();
		return null;
	}

	@Override
	public boolean add(V e) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean remove(Object o) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean containsAll(Collection<?> c) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean addAll(Collection<? extends V> c) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean retainAll(Collection<?> c) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public boolean removeAll(Collection<?> c) {
		throwShouldNeverCall();
		return false;
	}

	@Override
	public void clear() {
		throwShouldNeverCall();
	}

	private void throwShouldNeverCall() {
		throw new HelenusMappingException("should be never called");
	}

	@Override
	public String toString() {
		return "SetDsl";
	}
}
@@ -16,6 +16,7 @@
package net.helenus.mapping;

import java.lang.reflect.Method;

import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;

@@ -24,99 +25,91 @@ import net.helenus.support.HelenusMappingException;

public final class ColumnInformation {

	private final IdentityName columnName;
	private final ColumnType columnType;
	private final int ordinal;
	private final OrderingDirection ordering;

	public ColumnInformation(Method getter) {

		String columnName = null;
		boolean forceQuote = false;
		ColumnType columnTypeLocal = ColumnType.COLUMN;
		int ordinalLocal = 0;
		OrderingDirection orderingLocal = OrderingDirection.ASC;

		PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class);
		if (partitionKey != null) {
			columnName = partitionKey.value();
			forceQuote = partitionKey.forceQuote();
			columnTypeLocal = ColumnType.PARTITION_KEY;
			ordinalLocal = partitionKey.ordinal();
		}

		ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class);
		if (clusteringColumn != null) {
			ensureSingleColumnType(columnTypeLocal, getter);
			columnName = clusteringColumn.value();
			forceQuote = clusteringColumn.forceQuote();
			columnTypeLocal = ColumnType.CLUSTERING_COLUMN;
			ordinalLocal = clusteringColumn.ordinal();
			orderingLocal = clusteringColumn.ordering();
		}

		StaticColumn staticColumn = getter.getDeclaredAnnotation(StaticColumn.class);
		if (staticColumn != null) {
			ensureSingleColumnType(columnTypeLocal, getter);
			columnName = staticColumn.value();
			forceQuote = staticColumn.forceQuote();
			columnTypeLocal = ColumnType.STATIC_COLUMN;
			ordinalLocal = staticColumn.ordinal();
		}

		Column column = getter.getDeclaredAnnotation(Column.class);
		if (column != null) {
			ensureSingleColumnType(columnTypeLocal, getter);
			columnName = column.value();
			forceQuote = column.forceQuote();
			columnTypeLocal = ColumnType.COLUMN;
			ordinalLocal = column.ordinal();
		}

		if (columnName == null || columnName.isEmpty()) {
			columnName = MappingUtil.getDefaultColumnName(getter);
		}

		this.columnName = new IdentityName(columnName, forceQuote);
		this.columnType = columnTypeLocal;
		this.ordinal = ordinalLocal;
		this.ordering = orderingLocal;
	}

	public IdentityName getColumnName() {
		return columnName;
	}

	public ColumnType getColumnType() {
		return columnType;
	}

	public int getOrdinal() {
		return ordinal;
	}

	public OrderingDirection getOrdering() {
		return ordering;
	}

	private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {

		if (columnTypeLocal != ColumnType.COLUMN) {
			throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
		}
	}

	@Override
	public String toString() {
		return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
				+ ", ordering=" + ordering + "]";
	}
}
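For orientation, a hedged example of the getter annotations ColumnInformation resolves (the Account interface and its column names are made up; attribute names follow the calls made above, e.g. ordinal() and ordering()):

@Table
public interface Account {
	@PartitionKey(ordinal = 0)
	UUID id();                     // -> PARTITION_KEY, ordinal 0
	@ClusteringColumn(ordinal = 1, ordering = OrderingDirection.DESC)
	Date createdAt();              // -> CLUSTERING_COLUMN, ordinal 1, DESC
	@StaticColumn
	Long version();                // -> STATIC_COLUMN
	@Column("display_name")
	String name();                 // -> COLUMN named "display_name"
}

Stacking more than one of these on a single getter trips ensureSingleColumnType(...); a getter with none of them falls back to MappingUtil.getDefaultColumnName(getter).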
@@ -16,8 +16,5 @@
package net.helenus.mapping;

public enum ColumnType {
	PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;
}
@@ -16,18 +16,23 @@
package net.helenus.mapping;

import java.util.Collection;
import java.util.List;

import net.helenus.core.cache.Facet;

public interface HelenusEntity {

	HelenusEntityType getType();

	boolean isCacheable();

	Class<?> getMappingInterface();

	IdentityName getName();

	Collection<HelenusProperty> getOrderedProperties();

	HelenusProperty getProperty(String name);

	List<Facet> getFacets();
}
@@ -16,8 +16,5 @@
package net.helenus.mapping;

public enum HelenusEntityType {
	TABLE, VIEW, TUPLE, UDT;
}
@@ -15,275 +15,293 @@
 */
package net.helenus.mapping;

import java.lang.reflect.Method;
import java.util.*;

import org.apache.commons.lang3.ClassUtils;

import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*;
import net.helenus.support.HelenusMappingException;

public final class HelenusMappingEntity implements HelenusEntity {

	private final Class<?> iface;
	private final HelenusEntityType type;
	private final IdentityName name;
	private final boolean cacheable;
	private final ImmutableMap<String, Method> methods;
	private final ImmutableMap<String, HelenusProperty> props;
	private final ImmutableList<HelenusProperty> orderedProps;
	private final List<Facet> facets;

	public HelenusMappingEntity(Class<?> iface, Metadata metadata) {
		this(iface, autoDetectType(iface), metadata);
	}

	public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) {

		if (iface == null || !iface.isInterface()) {
			throw new IllegalArgumentException("invalid parameter " + iface);
		}

		this.iface = iface;
		this.type = Objects.requireNonNull(type, "type is empty");
		this.name = resolveName(iface, type);

		HelenusSettings settings = Helenus.settings();

		Map<String, Method> methods = new HashMap<String, Method>();
		for (Method m : iface.getDeclaredMethods()) {
			methods.put(m.getName(), m);
		}

		for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
			if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) {
				for (Method m : c.getDeclaredMethods()) {
					Method o = methods.get(m.getName());
					if (o != null) {
						// Prefer overridden method implementation.
						if (o.getDeclaringClass().isAssignableFrom(m.getDeclaringClass())) {
							methods.put(m.getName(), m);
						}
					} else {
						methods.put(m.getName(), m);
					}
				}
			}
		}

		List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>();
		ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder();
		ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder();

		for (Method method : methods.values()) {

			if (settings.getGetterMethodDetector().apply(method)) {

				methodsBuilder.put(method.getName(), method);

				if (metadata != null) {
					HelenusProperty prop = new HelenusMappingProperty(this, method, metadata);

					propsBuilder.put(prop.getPropertyName(), prop);
					propsLocal.add(prop);
				}
			}
		}

		this.methods = methodsBuilder.build();
		this.props = propsBuilder.build();

		Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE);
		this.orderedProps = ImmutableList.copyOf(propsLocal);

		validateOrdinals();

		// Caching
		cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));

		List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
		ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
		facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
		for (HelenusProperty prop : orderedProps) {
			switch (prop.getColumnType()) {
				case PARTITION_KEY :
				case CLUSTERING_COLUMN :
					primaryKeyProperties.add(prop);
					break;
				default :
					if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
						facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
						primaryKeyProperties = null;
					}
					Optional<IdentityName> optionalIndexName = prop.getIndexName();
					if (optionalIndexName.isPresent()) {
						UnboundFacet facet = new UnboundFacet(prop);
						facetsBuilder.add(facet);
					}
			}
		}
		if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
			facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
		}
		this.facets = facetsBuilder.build();
	}

	private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {

		switch (type) {
			case TABLE :
				return MappingUtil.getTableName(iface, true);

			case VIEW :
				return MappingUtil.getViewName(iface, true);

			case TUPLE :
				return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);

			case UDT :
				return MappingUtil.getUserDefinedTypeName(iface, true);
		}

		throw new HelenusMappingException("invalid entity type " + type + " in " + type);
	}

	private static HelenusEntityType autoDetectType(Class<?> iface) {

		Objects.requireNonNull(iface, "empty iface");

		if (null != iface.getDeclaredAnnotation(Table.class)) {
			return HelenusEntityType.TABLE;
		} else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
			return HelenusEntityType.VIEW;
		} else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
			return HelenusEntityType.TUPLE;
		} else if (null != iface.getDeclaredAnnotation(UDT.class)) {
			return HelenusEntityType.UDT;
		}

		throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
	}

	@Override
	public HelenusEntityType getType() {
		return type;
	}

	@Override
	public boolean isCacheable() {
		return cacheable;
	}

	@Override
	public Class<?> getMappingInterface() {
		return iface;
	}

	@Override
	public Collection<HelenusProperty> getOrderedProperties() {
		return orderedProps;
	}

	@Override
	public HelenusProperty getProperty(String name) {
		HelenusProperty property = props.get(name);
		if (property == null && methods.containsKey(name)) {
			property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
			return property; // TODO(gburd): review adding these into the props map...
		}
		return props.get(name);
	}

	@Override
	public List<Facet> getFacets() {
		return facets;
	}

	@Override
	public IdentityName getName() {
		return name;
	}

	private void validateOrdinals() {

		switch (getType()) {
			case TABLE :
				validateOrdinalsForTable();
				break;

			case TUPLE :
				validateOrdinalsInTuple();
				break;

			default :
				break;
		}
	}

	private void validateOrdinalsForTable() {

		BitSet partitionKeys = new BitSet();
		BitSet clusteringColumns = new BitSet();

		for (HelenusProperty prop : getOrderedProperties()) {

			ColumnType type = prop.getColumnType();

			int ordinal = prop.getOrdinal();

			switch (type) {
				case PARTITION_KEY :
					if (partitionKeys.get(ordinal)) {
						throw new HelenusMappingException(
								"detected two or more partition key columns with the same ordinal " + ordinal + " in "
										+ prop.getEntity());
					}
					partitionKeys.set(ordinal);
					break;

				case CLUSTERING_COLUMN :
					if (clusteringColumns.get(ordinal)) {
						throw new HelenusMappingException("detected two or clustering columns with the same ordinal "
								+ ordinal + " in " + prop.getEntity());
					}
					clusteringColumns.set(ordinal);
					break;

				default :
					break;
			}
		}
	}

	private void validateOrdinalsInTuple() {
		boolean[] ordinals = new boolean[props.size()];

		getOrderedProperties().forEach(p -> {
			int ordinal = p.getOrdinal();

			if (ordinal < 0 || ordinal >= ordinals.length) {
				throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
						+ p.getPropertyName() + " in " + p.getEntity());
			}

			if (ordinals[ordinal]) {
				throw new HelenusMappingException(
						"detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
			}

			ordinals[ordinal] = true;
		});

		for (int i = 0; i != ordinals.length; ++i) {
			if (!ordinals[i]) {
				throw new HelenusMappingException("detected absent ordinal " + i + " in " + this);
			}
		}
	}

	@Override
	public String toString() {

		StringBuilder str = new StringBuilder();
		str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
				.append(type.name().toLowerCase()).append(":\n");

		for (HelenusProperty prop : getOrderedProperties()) {
			str.append(prop.toString());
			str.append("\n");
		}
		return str.toString();
	}
}
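A rough sketch, under the assumption that facet order follows the constructor loop above, of what HelenusMappingEntity.getFacets() produces for a small hypothetical entity:

// For an entity such as:
//   @Table @Cacheable interface Account { @PartitionKey UUID id(); @Index("by_email") String email(); }
// getFacets() would hold, in order:
//   Facet("table", "account")   -- the fixed table facet
//   UnboundFacet([id])          -- one facet over the primary key columns
//   UnboundFacet([email])       -- one facet per indexed, non-key column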
@@ -15,13 +15,16 @@
 */
package net.helenus.mapping;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Optional;
import java.util.function.Function;

import javax.validation.ConstraintValidator;

import com.datastax.driver.core.Metadata;

import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes;

@@ -29,174 +32,171 @@ import net.helenus.mapping.type.AbstractDataType;

public final class HelenusMappingProperty implements HelenusProperty {

	private final HelenusEntity entity;
	private final Method getter;

	private final String propertyName;
	private final Optional<IdentityName> indexName;
	private final boolean caseSensitiveIndex;

	private final ColumnInformation columnInfo;

	private final Type genericJavaType;
	private final Class<?> javaType;
	private final AbstractJavaType abstractJavaType;
	private final AbstractDataType dataType;
	private final ConstraintValidator<? extends Annotation, ?>[] validators;
	private volatile Optional<Function<Object, Object>> readConverter = null;
	private volatile Optional<Function<Object, Object>> writeConverter = null;

	public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
		this.entity = entity;
		this.getter = getter;

		this.propertyName = MappingUtil.getPropertyName(getter);
		this.indexName = MappingUtil.getIndexName(getter);
		this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter);

		this.columnInfo = new ColumnInformation(getter);

		this.genericJavaType = getter.getGenericReturnType();
		this.javaType = getter.getReturnType();
		this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);

		this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
				this.columnInfo.getColumnType(), metadata);

		this.validators = MappingUtil.getValidators(getter);
	}

	@Override
	public HelenusEntity getEntity() {
		return entity;
	}

	@Override
	public Class<?> getJavaType() {
		return (Class<?>) javaType;
	}

	@Override
	public AbstractDataType getDataType() {
		return dataType;
	}

	@Override
	public ColumnType getColumnType() {
		return columnInfo.getColumnType();
	}

	@Override
	public int getOrdinal() {
		return columnInfo.getOrdinal();
	}

	@Override
	public OrderingDirection getOrdering() {
		return columnInfo.getOrdering();
	}

	@Override
	public IdentityName getColumnName() {
		return columnInfo.getColumnName();
	}

	@Override
	public Optional<IdentityName> getIndexName() {
		return indexName;
	}

	@Override
	public boolean caseSensitiveIndex() {
		return caseSensitiveIndex;
	}

	@Override
	public String getPropertyName() {
		return propertyName;
	}

	@Override
	public Method getGetterMethod() {
		return getter;
	}

	@Override
	public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {

		if (readConverter == null) {
			readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository);
		}

		return readConverter;
	}

	@Override
	public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {

		if (writeConverter == null) {
			writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository);
		}

		return writeConverter;
	}

	@Override
	public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
		return validators;
	}

	@Override
	public String toString() {

		StringBuilder str = new StringBuilder();

		String columnName = this.getColumnName().getName();
		str.append(" ");
		str.append(this.getDataType());
		str.append(" ");
		str.append(this.getPropertyName());
		str.append("(");
		if (!columnName.equals(this.getPropertyName())) {
			str.append(columnName);
		}
		str.append(") ");

		ColumnType type = this.getColumnType();

		switch (type) {
			case PARTITION_KEY :
				str.append("partition_key[");
				str.append(this.getOrdinal());
				str.append("] ");
				break;

			case CLUSTERING_COLUMN :
				str.append("clustering_column[");
				str.append(this.getOrdinal());
				str.append("] ");
				OrderingDirection od = this.getOrdering();
				if (od != null) {
					str.append(od.name().toLowerCase()).append(" ");
				}
				break;

			case STATIC_COLUMN :
				str.append("static ");
				break;

			case COLUMN :
				break;
		}

		Optional<IdentityName> idx = this.getIndexName();
		if (idx.isPresent()) {
			str.append("index(").append(idx.get().getName()).append(") ");
		}

		return str.toString();
	}
}
@@ -19,37 +19,39 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;

import javax.validation.ConstraintValidator;

import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType;

public interface HelenusProperty {

	HelenusEntity getEntity();

	String getPropertyName();

	Method getGetterMethod();

	IdentityName getColumnName();

	Optional<IdentityName> getIndexName();

	boolean caseSensitiveIndex();

	Class<?> getJavaType();

	AbstractDataType getDataType();

	ColumnType getColumnType();

	int getOrdinal();

	OrderingDirection getOrdering();

	Optional<Function<Object, Object>> getReadConverter(SessionRepository repository);

	Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository);

	ConstraintValidator<? extends Annotation, ?>[] getValidators();
}
@@ -19,41 +19,41 @@ import net.helenus.support.CqlUtil;

public final class IdentityName {

	private final String name;

	private final boolean forceQuote;

	public IdentityName(String name, boolean forceQuote) {
		this.name = name.toLowerCase();
		this.forceQuote = forceQuote;
	}

	public static IdentityName of(String name, boolean forceQuote) {
		return new IdentityName(name, forceQuote);
	}

	public String getName() {
		return name;
	}

	public boolean isForceQuote() {
		return forceQuote;
	}

	public String toCql(boolean overrideForceQuote) {
		if (overrideForceQuote) {
			return CqlUtil.forceQuote(name);
		} else {
			return name;
		}
	}

	public String toCql() {
		return toCql(forceQuote);
	}

	@Override
	public String toString() {
		return toCql();
	}
}
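A quick illustration of the quoting rules above, assuming CqlUtil.forceQuote wraps the stored (lower-cased) name in double quotes:

IdentityName plain = IdentityName.of("MyTable", false);
plain.toCql();        // mytable     -- names are lower-cased in the constructor
IdentityName quoted = IdentityName.of("MyTable", true);
quoted.toCql();       // "mytable"   -- forceQuote routes through CqlUtil.forceQuote(name)
quoted.toCql(false);  // mytable     -- the boolean override ignores the stored flag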
@@ -20,8 +20,10 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import javax.validation.Constraint;
import javax.validation.ConstraintValidator;

import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.*;

@@ -31,255 +33,253 @@ import net.helenus.support.HelenusMappingException;

public final class MappingUtil {

	@SuppressWarnings("unchecked")
	public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0];

	private MappingUtil() {
	}

	public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {

		List<ConstraintValidator<? extends Annotation, ?>> list = null;

		for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) {

			list = addValidators(constraintAnnotation, list);

			Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();

			for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {

				list = addValidators(possibleConstraint, list);
			}
		}

		if (list == null) {
			return EMPTY_VALIDATORS;
		} else {
			return list.toArray(EMPTY_VALIDATORS);
		}
	}

	private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation,
			List<ConstraintValidator<? extends Annotation, ?>> list) {

		Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();

		for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {

			if (possibleConstraint instanceof Constraint) {

				Constraint constraint = (Constraint) possibleConstraint;

				for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {

					ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator
							.instantiateClass(clazz);

					((ConstraintValidator) validator).initialize(constraintAnnotation);

					if (list == null) {
						list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>();
					}

					list.add(validator);
				}
			}
		}

		return list;
	}

	public static Optional<IdentityName> getIndexName(Method getterMethod) {

		String indexName = null;
		boolean forceQuote = false;

		Index index = getterMethod.getDeclaredAnnotation(Index.class);

		if (index != null) {
			indexName = index.value();
			forceQuote = index.forceQuote();

			if (indexName == null || indexName.isEmpty()) {
				indexName = getDefaultColumnName(getterMethod);
			}
		}

		return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty();
	}

	public static boolean caseSensitiveIndex(Method getterMethod) {
		Index index = getterMethod.getDeclaredAnnotation(Index.class);

		if (index != null) {
			return index.caseSensitive();
		}

		return false;
	}

	public static String getPropertyName(Method getter) {
		return getter.getName();
	}

	public static String getDefaultColumnName(Method getter) {
		return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
	}

	public static IdentityName getUserDefinedTypeName(Class<?> iface, boolean required) {

		String userTypeName = null;
		boolean forceQuote = false;

		UDT userDefinedType = iface.getDeclaredAnnotation(UDT.class);

		if (userDefinedType != null) {

			userTypeName = userDefinedType.value();
			forceQuote = userDefinedType.forceQuote();

			if (userTypeName == null || userTypeName.isEmpty()) {
				userTypeName = getDefaultEntityName(iface);
			}

			return new IdentityName(userTypeName, forceQuote);
		}

		if (required) {
			throw new HelenusMappingException("entity must have annotation @UserDefinedType " + iface);
		}

		return null;
	}

	public static boolean isTuple(Class<?> iface) {

		Tuple tuple = iface.getDeclaredAnnotation(Tuple.class);

		return tuple != null;
	}

	public static boolean isUDT(Class<?> iface) {

		UDT udt = iface.getDeclaredAnnotation(UDT.class);

		return udt != null;
	}

	public static IdentityName getViewName(Class<?> iface, boolean required) {

		String viewName = null;
		boolean forceQuote = false;

		MaterializedView view = iface.getDeclaredAnnotation(MaterializedView.class);

		if (view != null) {
			viewName = view.value();
			forceQuote = view.forceQuote();

		} else if (required) {
			throw new HelenusMappingException("entity must have annotation @Table " + iface);
		}

		if (viewName == null || viewName.isEmpty()) {
			viewName = getDefaultEntityName(iface);
		}

		return new IdentityName(viewName, forceQuote);
	}

	public static IdentityName getTableName(Class<?> iface, boolean required) {

		String tableName = null;
		boolean forceQuote = false;

		Table table = iface.getDeclaredAnnotation(Table.class);

		if (table != null) {
			tableName = table.value();
			forceQuote = table.forceQuote();

		} else if (required) {
			throw new HelenusMappingException("entity must have annotation @Table " + iface);
		}

		if (tableName == null || tableName.isEmpty()) {
			tableName = getDefaultEntityName(iface);
		}

		return new IdentityName(tableName, forceQuote);
	}

	public static String getDefaultEntityName(Class<?> iface) {
		return Helenus.settings().getPropertyToColumnConverter().apply(iface.getSimpleName());
	}

	public static Class<?> getMappingInterface(Object pojo) {

		Class<?> iface = null;

		if (pojo instanceof Class) {
			iface = (Class<?>) pojo;

			if (!iface.isInterface()) {
				throw new HelenusMappingException("expected interface " + iface);
			}

		} else {
			Class<?>[] ifaces = pojo.getClass().getInterfaces();

			int len = ifaces.length;
			for (int i = 0; i != len; ++i) {

				iface = ifaces[0];

				if (MapExportable.class.isAssignableFrom(iface)) {
					continue;
				}

				if (iface.getDeclaredAnnotation(Table.class) != null
						|| iface.getDeclaredAnnotation(MaterializedView.class) != null
						|| iface.getDeclaredAnnotation(UDT.class) != null
						|| iface.getDeclaredAnnotation(Tuple.class) != null) {

					break;
				}
			}
		}

		if (iface == null) {
			throw new HelenusMappingException("dsl interface not found for " + pojo);
		}

		return iface;
	}

	public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) {

		try {
			Object childDsl = getter.get();

			if (childDsl instanceof DslExportable) {
				DslExportable e = (DslExportable) childDsl;
				return e.getParentDslHelenusPropertyNode();
			} else if (childDsl instanceof MapDsl) {
				MapDsl mapDsl = (MapDsl) childDsl;
				return mapDsl.getParent();
			} else if (childDsl instanceof ListDsl) {
				ListDsl listDsl = (ListDsl) childDsl;
				return listDsl.getParent();
			} else if (childDsl instanceof SetDsl) {
				SetDsl setDsl = (SetDsl) childDsl;
				return setDsl.getParent();
|
||||
}
|
||||
|
||||
throw new HelenusMappingException("getter must reference to the dsl object " + getter);
|
||||
throw new HelenusMappingException("getter must reference to the dsl object " + getter);
|
||||
|
||||
} catch (DslPropertyException e) {
|
||||
return e.getPropertyNode();
|
||||
}
|
||||
}
|
||||
} catch (DslPropertyException e) {
|
||||
return e.getPropertyNode();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
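A minimal sketch of how these MappingUtil helpers might be exercised. The Account interface below is hypothetical, and the package names in the imports are assumed from the surrounding diff; it is an illustration, not part of the change set.

import net.helenus.mapping.IdentityName;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical mapping interface used only for illustration.
@Table("accounts")
interface Account {
  @PartitionKey
  String id();
}

class MappingUtilSketch {
  public static void main(String[] args) {
    // Resolve the table name declared by @Table (falls back to the converted
    // simple name when the annotation value is empty).
    IdentityName table = MappingUtil.getTableName(Account.class, true);
    System.out.println(table); // carries the name "accounts"

    // Annotation probes used by the schema generator.
    System.out.println(MappingUtil.isTuple(Account.class)); // false
    System.out.println(MappingUtil.isUDT(Account.class));   // false

    // getMappingInterface() accepts either a Class or a pojo and walks its
    // interfaces looking for @Table, @MaterializedView, @UDT or @Tuple.
    Class<?> iface = MappingUtil.getMappingInterface(Account.class);
    System.out.println(iface.getSimpleName()); // Account
  }
}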
@@ -18,28 +18,28 @@ package net.helenus.mapping;

import net.helenus.support.HelenusMappingException;

public enum OrderingDirection {

  ASC("ASC"),
  DESC("DESC");

  private final String cql;

  private OrderingDirection(String cql) {
    this.cql = cql;
  }

  public String cql() {
    return cql;
  }

  public static OrderingDirection parseString(String name) {

    if (ASC.cql.equalsIgnoreCase(name)) {
      return ASC;
    } else if (DESC.cql.equalsIgnoreCase(name)) {
      return DESC;
    }

    throw new HelenusMappingException("invalid ordering direction name " + name);
  }
}
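A quick, self-contained sketch of the parse round-trip this enum provides (no Cassandra session needed):

import net.helenus.mapping.OrderingDirection;

class OrderingDirectionSketch {
  public static void main(String[] args) {
    // Parsing is case-insensitive; anything other than "ASC"/"DESC" throws
    // a HelenusMappingException.
    OrderingDirection asc = OrderingDirection.parseString("asc");
    System.out.println(asc.cql());   // ASC

    OrderingDirection desc = OrderingDirection.parseString("DESC");
    System.out.println(desc.cql());  // DESC
  }
}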
@@ -18,17 +18,16 @@ package net.helenus.mapping;

import java.util.Comparator;

public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> {
  INSTANCE;

  public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {

    int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());

    if (c == 0) {
      c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());
    }

    return c;
  }
}
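The comparator sorts properties first by column type, then by declared ordinal. The stand-in sketch below reproduces that two-level rule on a small hypothetical type; it does not use HelenusProperty itself, and the column-type ordinal values in the comments are illustrative assumptions only.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

class ColumnOrderSketch {

  // Hypothetical stand-in for HelenusProperty: only the two fields the
  // comparator looks at (column type ordinal, then declared ordinal).
  static final class Prop {
    final int columnTypeOrdinal; // e.g. partition key < clustering column < regular column
    final int ordinal;
    final String name;

    Prop(int columnTypeOrdinal, int ordinal, String name) {
      this.columnTypeOrdinal = columnTypeOrdinal;
      this.ordinal = ordinal;
      this.name = name;
    }
  }

  public static void main(String[] args) {
    Comparator<Prop> typeThenOrdinal =
        Comparator.comparingInt((Prop p) -> p.columnTypeOrdinal)
            .thenComparingInt(p -> p.ordinal);

    List<Prop> props = Arrays.asList(
        new Prop(2, 0, "payload"),
        new Prop(1, 1, "second_ck"),
        new Prop(1, 0, "first_ck"),
        new Prop(0, 0, "pk"));

    props.sort(typeThenOrdinal);
    props.forEach(p -> System.out.println(p.name)); // pk, first_ck, second_ck, payload
  }
}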
@@ -19,78 +19,93 @@ import java.lang.annotation.ElementType;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import net.helenus.mapping.OrderingDirection;

/**
 * ClusteringColumn is the family column in the legacy Cassandra API.
 *
 * <p>
 * The purpose of this column is to add an additional dimension to the table.
 * Both @PartitionKey and @ClusteringColumn together form the primary key of the
 * table. The primary difference between them is that the first is used for
 * routing, to locate the data node in the cluster, while the second is used
 * inside the node to locate the piece of data on that concrete machine.
 *
 * <p>
 * ClusteringColumn can be represented as a key in a SortedMap that is fully
 * stored on a single node. Developers must choose clustering column fields
 * carefully, because all data inside this SortedMap must fit on one node.
 *
 * <p>
 * ClusteringColumn can have more than one part, and the order of parts is
 * important. This order defines how Cassandra joins the parts and influences
 * data retrieval operations. Each part can have an ordering property that
 * defines the default ascending or descending order of data. With two or more
 * parts, select queries need to keep all parts in the same consistent order in
 * which they are defined in the table.
 *
 * <p>
 * For example, if the first part uses ASC ordering and the second is also ASC,
 * Cassandra will sort entries like this: a-a a-b b-a b-b. In this case we are
 * able to run the queries: ORDER BY first ASC, second ASC; ORDER BY first DESC,
 * second DESC; WHERE first=? ORDER BY second ASC; WHERE first=? ORDER BY second
 * DESC; WHERE first=? AND second=?
 *
 * <p>
 * But we can not run the queries: ORDER BY first DESC, second ASC; ORDER BY
 * first ASC, second DESC; WHERE second=?; ORDER BY first (ASC,DESC)
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface ClusteringColumn {

  /**
   * Default value is the name of the method normalized to underscore.
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * ClusteringColumn parts must be ordered in the @Table; this is a requirement
   * of Cassandra. Cassandra joins all parts into the final clustering key that
   * is stored in the column family name. Additionally, each part can have an
   * ordering (ASC, DESC) which, together with the sequence of parts, determines
   * the key comparison function, so Cassandra always stores column family names
   * in sorted order.
   *
   * <p>
   * By default ordinal has the value 0, because in most cases a @Table has a
   * single ClusteringColumn. If you have two or more parts in the
   * ClusteringColumn, then you need to use ordinal() to define the sequence of
   * the parts.
   *
   * @return number that is used to sort clustering columns
   */
  int ordinal() default 0;

  /**
   * Default order of values in the ClusteringColumn. This ordering is used for
   * comparison of the clustering column values when Cassandra stores them in
   * sorted order.
   *
   * <p>
   * Default value is the ascending order.
   *
   * @return ascending order or descending order of clustering column values
   */
  OrderingDirection ordering() default OrderingDirection.ASC;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This
   * property marks that the name of the UDT type needs to be quoted.
   *
   * <p>
   * Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
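The ordering rules above are easiest to see on a concrete mapping. The Timeline interface below is a hypothetical sketch (entity name, fields and key layout are illustrative only, and the @Table/@PartitionKey package locations are assumed) showing a two-part clustering key declared in the sequence and direction the javadoc describes.

import java.util.Date;
import java.util.UUID;

import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical entity: one partition per user, rows clustered by time, then id.
@Table("timeline")
interface Timeline {

  @PartitionKey
  UUID userId();

  // First clustering part: newest entries first within a partition.
  @ClusteringColumn(ordinal = 0, ordering = OrderingDirection.DESC)
  Date timestamp();

  // Second clustering part: tie-breaker; ORDER BY clauses must keep the
  // declared sequence (timestamp, entryId) and consistent directions.
  @ClusteringColumn(ordinal = 1, ordering = OrderingDirection.DESC)
  UUID entryId();

  String text();
}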
@@ -18,45 +18,51 @@ package net.helenus.mapping.annotation;

import java.lang.annotation.*;

/**
 * Column annotation is used to define additional properties of the column in
 * entity mapping interfaces: @Table, @UDT, @Tuple.
 *
 * <p>
 * Column annotation can be used to override the default name of the column or
 * to set up the order of the columns in the mapping.
 *
 * <p>
 * Usually for @Table and @UDT types it is not important to define the order of
 * the columns, but in a @Tuple mapping it is required, because the tuple itself
 * represents a sequence of types with a particular order in the table's column.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Column {

  /**
   * Default value is the name of the method normalized to underscore.
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * Ordinal will be used for ascending sorting of columns.
   *
   * <p>
   * Default value is 0, because not all mapping entities require all fields to
   * have unique ordinals; only the @Tuple mapping entity requires all of them
   * to be unique.
   *
   * @return number that is used to sort columns, usually for @Tuple only
   */
  int ordinal() default 0;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This
   * property marks that the name of the UDT type needs to be quoted.
   *
   * <p>
   * Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
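Since ordinals matter most for @Tuple mappings, here is a hypothetical tuple sketch (the GeoPoint interface and its fields are invented for illustration):

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Tuple;

// Hypothetical tuple type: ordinals fix the position of each field in the CQL
// tuple, which is why @Tuple mappings require them to be unique.
@Tuple
interface GeoPoint {

  @Column(ordinal = 0)
  double latitude();

  @Column(ordinal = 1)
  double longitude();

  // A reserved-looking name, so ask Helenus to quote it in generated CQL.
  @Column(value = "order", ordinal = 2, forceQuote = true)
  int order();
}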
@@ -16,200 +16,240 @@

package net.helenus.mapping.annotation;

import java.lang.annotation.*;

import javax.validation.Constraint;

import net.helenus.mapping.validator.*;

/**
 * Constraint annotations are used for data integrity, mostly
 * for @java.lang.String types. The place of the annotation is the particular
 * method in the model interface.
 *
 * <p>
 * None of them has an effect on selects and data retrieval operations.
 *
 * <p>
 * Supported types: @NotNull supports any @java.lang.Object type; all other
 * annotations support the @java.lang.String type.
 */
public final class Constraints {

  private Constraints() {}

  /**
   * NotNull annotation is used to check that a value is not null before storing it.
   *
   * <p>
   * Applicable to any @java.lang.Object.
   *
   * <p>
   * It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotNullValidator.class)
  public @interface NotNull {}

  /**
   * NotEmpty annotation is used to check that a value has text before storing it.
   *
   * <p>
   * It also checks for null and is a stricter annotation than @NotNull.
   *
   * <p>
   * Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>
   * It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotEmptyValidator.class)
  public @interface NotEmpty {}

  /**
   * Email annotation is used to check that a value is a valid email address
   * before storing it.
   *
   * <p>
   * Can be used only for @CharSequence.
   *
   * <p>
   * It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = EmailValidator.class)
  public @interface Email {}

  /**
   * Number annotation is used to check that all characters in a value are
   * digits before storing it.
   *
   * <p>
   * Can be used only for @java.lang.CharSequence.
   *
   * <p>
   * It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NumberValidator.class)
  public @interface Number {}

  /**
   * Alphabet annotation is used to check that all characters in a value belong
   * to a specific alphabet before storing it.
   *
   * <p>
   * Can be used only for @java.lang.CharSequence.
   *
   * <p>
   * It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = AlphabetValidator.class)
  public @interface Alphabet {

    /**
     * Defines the alphabet that will be used to check the value.
     *
     * @return alphabet characters in the string
     */
    String value();
  }

  /**
   * Length annotation is used to ensure that a value has an exact length before
   * storing it.
   *
   * <p>
   * Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LengthValidator.class)
  public @interface Length {

    int value();
  }

  /**
   * MaxLength annotation is used to ensure that a value has a length less than
   * or equal to some threshold before storing it.
   *
   * <p>
   * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = MaxLengthValidator.class)
  public @interface MaxLength {

    int value();
  }

  /**
   * MinLength annotation is used to ensure that a value has a length greater
   * than or equal to some threshold before storing it.
   *
   * <p>
   * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = MinLengthValidator.class)
  public @interface MinLength {

    int value();
  }

  /**
   * LowerCase annotation is used to ensure that a value is in lower case before
   * storing it.
   *
   * <p>
   * Can be used only for @java.lang.CharSequence.
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LowerCaseValidator.class)
  public @interface LowerCase {}

  /**
   * UpperCase annotation is used to ensure that a value is in upper case before
   * storing it.
   *
   * <p>
   * Can be used only for @java.lang.CharSequence.
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = UpperCaseValidator.class)
  public @interface UpperCase {}

  /**
   * Pattern annotation is used to ensure that a value matches the given regular
   * expression before storing it.
   *
   * <p>
   * Can be used only for @java.lang.CharSequence.
   *
   * <p>
   * It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = PatternValidator.class)
  public @interface Pattern {

    /**
     * User defined regex expression to check the value against.
     *
     * @return Java regex pattern
     */
    String value();

    /**
     * Regex flags composition.
     *
     * @return Java regex flags
     */
    int flags();
  }
}
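To show where these constraint annotations sit in practice, here is a hypothetical model interface (entity and field names invented; @Table/@PartitionKey package locations assumed). The checks run on writes only, as the javadoc notes.

import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical mapping interface: constraints are declared on the accessor
// methods and validated before the value is stored.
@Table("users")
interface User {

  @PartitionKey
  @Constraints.NotNull
  @Constraints.Email
  String email();

  @Constraints.NotEmpty
  @Constraints.MaxLength(64)
  String displayName();

  // Pattern has no defaults, so both value() and flags() must be supplied.
  @Constraints.LowerCase
  @Constraints.Pattern(value = "[a-z0-9_]+", flags = 0)
  String handle();
}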
@@ -3,48 +3,52 @@ package net.helenus.mapping.annotation;

import java.lang.annotation.*;

/**
 * CoveringIndex annotation is used under the specific column or method in an
 * entity interface with the @Table annotation.
 *
 * <p>
 * A corresponding materialized view will be created based on the
 * underlying @Table for the specific column.
 *
 * <p>
 * This is useful when you need to perform IN or SORT/ORDER-BY queries and, to
 * do so, you need a different materialized table on disk in Cassandra.
 *
 * <p>
 * For each @Table annotated interface Helenus will create/update/verify
 * Cassandra Materialized Views and some indexes if needed on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface CoveringIndex {

  /**
   * Defines the name of the index. By default it is the entity name with the
   * column name as a suffix.
   *
   * @return name of the covering index
   */
  String name() default "";

  /**
   * Set of fields in this entity to replicate in the index.
   *
   * @return array of the string names of the fields
   */
  String[] covering() default "";

  /**
   * Set of fields to use as the partition keys for this projection.
   *
   * @return array of the string names of the fields
   */
  String[] partitionKeys() default "";

  /**
   * Set of fields to use as the clustering columns for this projection.
   *
   * @return array of the string names of the fields
   */
  String[] clusteringColumns() default "";
}
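A hypothetical usage sketch for @CoveringIndex (entity, field names and the projection layout are invented; the @Table/@PartitionKey imports are assumed). Because the annotation targets types, it is placed on the interface itself.

import net.helenus.mapping.annotation.CoveringIndex;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// Hypothetical sketch: ask Helenus to maintain a materialized view keyed by
// email so the table can also be queried by email, not only by id.
@Table("customers")
@CoveringIndex(
    name = "customers_by_email",
    covering = {"name"},
    partitionKeys = {"email"},
    clusteringColumns = {"id"})
interface Customer {

  @PartitionKey
  String id();

  String email();

  String name();
}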
Some files were not shown because too many files have changed in this diff.