Compare commits
No commits in common. "develop" and "gburd/wip-abstract-cache" have entirely different histories.
develop
...
gburd/wip-
315 changed files with 16438 additions and 19423 deletions
|
@ -3,6 +3,7 @@
|
|||
<component name="EclipseCodeFormatterProjectSettings">
|
||||
<option name="projectSpecificProfile">
|
||||
<ProjectSpecificProfile>
|
||||
<option name="formatter" value="ECLIPSE" />
|
||||
<option name="pathToConfigFileJava" value="$PROJECT_DIR$/../newton/formatting/onshape-eclipse-general-preferences.epf" />
|
||||
</ProjectSpecificProfile>
|
||||
</option>
|
||||
|
|
274
NOTES
274
NOTES
|
@ -1,35 +1,172 @@
|
|||
Operation/
|
||||
|-- AbstractStatementOperation
|
||||
| |-- AbstractOperation
|
||||
| | |-- AbstractFilterOperation
|
||||
| | | |-- CountOperation
|
||||
| | | |-- DeleteOperation
|
||||
| | | `-- UpdateOperation
|
||||
| | |-- BoundOperation
|
||||
| | `-- InsertOperation
|
||||
| |-- AbstractOptionalOperation
|
||||
| | |-- AbstractFilterOptionalOperation
|
||||
| | | |-- SelectFirstOperation
|
||||
| | | `-- SelectFirstTransformingOperation
|
||||
| | `-- BoundOptionalOperation
|
||||
| `-- AbstractStreamOperation
|
||||
| |-- AbstractFilterStreamOperation
|
||||
| | |-- SelectOperation
|
||||
| | `-- SelectTransformingOperation
|
||||
| `-- BoundStreamOperation
|
||||
|-- PreparedOperation
|
||||
|-- PreparedOptionalOperation
|
||||
`-- PreparedStreamOperation
|
||||
|
||||
|
||||
----
|
||||
@CompoundIndex()
|
||||
create a new col in the same table called __idx_a_b_c that the hash of the concatenated values in that order is stored, create a normal index for that (CREATE INDEX ...)
|
||||
if a query matches that set of columns then use that indexed col to fetch the desired results from that table
|
||||
could also work with .in() query if materialized view exists
|
||||
----
|
||||
|
||||
|
||||
|
||||
--- Cache
|
||||
// `E` is the type of the Entity class or one of:
|
||||
// - ResultSet
|
||||
// - ArrayTuple{N}
|
||||
// - Count
|
||||
// `F` is the type argument passed to us from HelenusSession DSL and carried on via one of the
|
||||
// Operation classes, it is going to be one of:
|
||||
// - ResultSet
|
||||
// - ArrayTuple{N}
|
||||
// - or a type previously registered as a HelenusEntity.
|
||||
// In the form of a:
|
||||
// - Stream<?> or an
|
||||
// - Optional<?>
|
||||
//
|
||||
// Operation/
|
||||
// |-- AbstractStatementOperation
|
||||
// | |-- AbstractOperation
|
||||
// | | |-- AbstractFilterOperation
|
||||
// | | | |-- CountOperation
|
||||
// | | | |-- DeleteOperation
|
||||
// | | | `-- UpdateOperation
|
||||
// | | |-- BoundOperation
|
||||
// | | `-- InsertOperation
|
||||
// | |-- AbstractOptionalOperation
|
||||
// | | |-- AbstractFilterOptionalOperation
|
||||
// | | | |-- SelectFirstOperation
|
||||
// | | | `-- SelectFirstTransformingOperation
|
||||
// | | `-- BoundOptionalOperation
|
||||
// | `-- AbstractStreamOperation
|
||||
// | |-- AbstractFilterStreamOperation
|
||||
// | | |-- SelectOperation
|
||||
// | | `-- SelectTransformingOperation
|
||||
// | `-- BoundStreamOperation
|
||||
// |-- PreparedOperation
|
||||
// |-- PreparedOptionalOperation
|
||||
// `-- PreparedStreamOperation
|
||||
//
|
||||
// These all boil down to: Select, Update, Insert, Delete and Count
|
||||
//
|
||||
// -- Select:
|
||||
// 1) Select statements that contain all primary key information will be "distinct" and
|
||||
// result in a single value or no match.
|
||||
// If present, return cached entity otherwise execute query and cache result.
|
||||
//
|
||||
// 2) Otherwise the result is a set, possibly empty, of values that match.
|
||||
// When within a UOW:
|
||||
// If present, return the cached value(s) from the statement cache matching the query string.
|
||||
// Otherwise, execute query and cache the result in the statement cache and update/merge the
|
||||
// entities into the entity cache.
|
||||
// NOTE: When we read data from the database we augment the select clause with TTL and write time
|
||||
// stamps for all columns that record such information so as to be able to properly expire
|
||||
// and merge values in the cache.
|
||||
//
|
||||
// -- Update:
|
||||
// Execute the database statement and then iff success upsert the entity being updated into the
|
||||
// entity cache.
|
||||
//
|
||||
// -- Insert/Upsert:
|
||||
// Same as Update.
|
||||
//
|
||||
// -- Delete:
|
||||
// Same as update, only remove the cached value from all caches on success.
|
||||
//
|
||||
// -- Count:
|
||||
// If operating within a UOW lookup count in statement cache, if not present execute query and cache result.
|
||||
//
|
||||
|
||||
|
||||
if (delegate instanceof SelectOperation) {
|
||||
SelectOperation<E> op = (SelectOperation<E>) delegate;
|
||||
|
||||
// Determine if we are caching and if so where.
|
||||
AbstractCache<CacheKey, Set<E>> cache = delegate.getCache();
|
||||
boolean prepareStatementForCaching = cache != null;
|
||||
if (uow != null) {
|
||||
prepareStatementForCaching = true;
|
||||
cache = uow.<Set<E>>getCacheEnclosing(cache);
|
||||
}
|
||||
|
||||
// The delegate will provide the cache key because it will either be:
|
||||
// a) when distinct: the combination of the partition/cluster key columns
|
||||
// b) otherwise: the table name followed by the portion of the SQL statement that would form the WHERE clause
|
||||
CacheKey key = (cache == null) ? null : delegate.getCacheKey();
|
||||
if (key != null && cache != null) {
|
||||
Set<E> value = cache.get(key);
|
||||
if (value != null) {
|
||||
// Select will always return a Stream<E>
|
||||
// TODO(gburd): SelectTransforming... apply fn here?
|
||||
result = (E) value.stream();
|
||||
if (cacheHitCounter != null) {
|
||||
cacheHitCounter.inc();
|
||||
}
|
||||
if (log != null) {
|
||||
log.info("cache hit");
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
if (cacheMissCounter != null) {
|
||||
cacheMissCounter.inc();
|
||||
}
|
||||
if (log != null) {
|
||||
log.info("cache miss");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (cache != null) {
|
||||
Object obj = delegate.unwrap(result);
|
||||
if (obj != null) {
|
||||
cache.put(key, obj);
|
||||
}
|
||||
|
||||
delegate.<E>extract(result, key, cache);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
// TODO: first, ask the delegate for the cacheKey
|
||||
// if this is a SELECT query:
|
||||
// if not in cache build the statement, execute the future, cache the result, transform the result then cache the transformations
|
||||
// if INSERT/UPSERT/UPDATE
|
||||
// if DELETE
|
||||
// if COUNT
|
||||
----------------------------
|
||||
|
||||
@Override
|
||||
public CacheKey getCacheKey() {
|
||||
|
||||
List<String>keys = new ArrayList<>(filters.size());
|
||||
HelenusEntity entity = props.get(0).getEntity();
|
||||
|
||||
for (HelenusPropertyNode prop : props) {
|
||||
switch(prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
|
||||
Filter filter = filters.get(prop.getProperty());
|
||||
if (filter != null) {
|
||||
keys.add(filter.toString());
|
||||
} else {
|
||||
// we're missing a part of the primary key, so we can't create a proper cache key
|
||||
return null;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// We've past the primary key components in this ordered list, so we're done building
|
||||
// the cache key.
|
||||
if (keys.size() > 0) {
|
||||
return new CacheKey(entity, Joiner.on(",").join(keys));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
---------------------------
|
||||
|
||||
// TODO(gburd): create a statement that matches one that wasn't prepared
|
||||
//String key =
|
||||
// "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString();
|
||||
|
@ -38,6 +175,64 @@ could also work with .in() query if materialized view exists
|
|||
//}
|
||||
|
||||
|
||||
------------------------
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.ResultSetFuture;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import com.google.common.cache.Cache;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.RemovalListener;
|
||||
import com.google.common.cache.RemovalNotification;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public abstract class AbstractCache<K, V> {
|
||||
final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
public Cache<K, V> cache;
|
||||
|
||||
public AbstractCache() {
|
||||
RemovalListener<K, V> listener =
|
||||
new RemovalListener<K, V>() {
|
||||
@Override
|
||||
public void onRemoval(RemovalNotification<K, V> n) {
|
||||
if (n.wasEvicted()) {
|
||||
String cause = n.getCause().name();
|
||||
logger.info(cause);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
cache = CacheBuilder.newBuilder()
|
||||
.maximumSize(10_000)
|
||||
.expireAfterAccess(20, TimeUnit.MINUTES)
|
||||
.weakKeys()
|
||||
.softValues()
|
||||
.removalListener(listener)
|
||||
.build();
|
||||
}
|
||||
|
||||
V get(K key) {
|
||||
return cache.getIfPresent(key);
|
||||
}
|
||||
|
||||
void put(K key, V value) {
|
||||
cache.put(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
------------------------------------------------------------------------------------------------
|
||||
|
||||
cache entities (2 methods) marked @Cacheable
|
||||
cache entities in txn context
|
||||
cache results when .cache() chained before .{a}sync() call, return a EvictableCacheItem<E> that has an .evict() method
|
||||
fix txn .andThen() chains
|
||||
|
||||
|
||||
|
||||
|
||||
primitive types have default values, (e.g. boolean, int, ...) but primitive wrapper classes do not and can be null (e.g. Boolean, Integer, ...)
|
||||
|
@ -177,26 +372,3 @@ begin:
|
|||
cache.put
|
||||
}
|
||||
*/
|
||||
------------------
|
||||
|
||||
InsertOperation
|
||||
|
||||
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
boolean includesNonIdentityValues = values.stream().map(t -> {
|
||||
ColumnType type = t._1.getProperty().getColumnType();
|
||||
return !((type == ColumnType.PARTITION_KEY) || (type == ColumnType.CLUSTERING_COLUMN));
|
||||
})
|
||||
.reduce(false, (acc, t) -> acc || t);
|
||||
if (resultType == iface) {
|
||||
if (values.size() > 0 && includesNonIdentityValues) {
|
||||
boolean immutable = iface.isAssignableFrom(Drafted.class);
|
||||
-------------------
|
||||
|
||||
final Object value;
|
||||
if (method.getParameterCount() == 1 && args[0] instanceof Boolean && src instanceof ValueProviderMap) {
|
||||
value = ((ValueProviderMap)src).get(methodName, (Boolean)args[0]);
|
||||
} else {
|
||||
value = src.get(methodName);
|
||||
}
|
||||
--------------------
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
mvn clean jar:jar javadoc:jar source:jar deploy -Prelease
|
||||
|
|
|
@ -1,14 +1,7 @@
|
|||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
if [ "X$1" == "Xall" ]; then
|
||||
for f in $(find ./src -name \*.java); do
|
||||
for f in $(find ./src -name \*.java); do
|
||||
echo Formatting $f
|
||||
java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $f
|
||||
done
|
||||
else
|
||||
for file in $(git status --short | awk '{print $2}'); do
|
||||
echo $file
|
||||
java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $file
|
||||
done
|
||||
fi
|
||||
done
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
mvn clean jar:jar javadoc:jar source:jar install -Prelease
|
||||
|
|
90
build.gradle
Normal file
90
build.gradle
Normal file
|
@ -0,0 +1,90 @@
|
|||
// gradle wrapper
|
||||
// ./gradlew clean generateLock saveLock
|
||||
// ./gradlew compileJava
|
||||
// ./gradlew run
|
||||
// ./gradlew run --debug-jvm
|
||||
// ./gradlew publishToMavenLocal
|
||||
|
||||
|
||||
buildscript {
|
||||
ext {}
|
||||
repositories {
|
||||
jcenter()
|
||||
mavenLocal()
|
||||
mavenCentral()
|
||||
maven { url "https://clojars.org/repo" }
|
||||
maven { url "https://plugins.gradle.org/m2/" }
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:4.+'
|
||||
classpath 'com.uber:okbuck:0.19.0'
|
||||
}
|
||||
}
|
||||
|
||||
apply plugin: 'java'
|
||||
apply plugin: 'idea'
|
||||
apply plugin: 'eclipse'
|
||||
apply plugin: 'java-library'
|
||||
apply plugin: 'maven-publish'
|
||||
apply plugin: 'com.uber.okbuck'
|
||||
apply plugin: 'nebula.dependency-lock'
|
||||
|
||||
task wrapper(type: Wrapper) {
|
||||
gradleVersion = '4.0.2'
|
||||
}
|
||||
|
||||
jar {
|
||||
baseName = 'helenus'
|
||||
group = 'net.helenus'
|
||||
version = '2.0.17-SNAPSHOT'
|
||||
}
|
||||
|
||||
description = """helenus"""
|
||||
|
||||
sourceCompatibility = 1.8
|
||||
targetCompatibility = 1.8
|
||||
tasks.withType(JavaCompile) {
|
||||
options.encoding = 'UTF-8'
|
||||
}
|
||||
|
||||
configurations.all {
|
||||
}
|
||||
|
||||
repositories {
|
||||
jcenter()
|
||||
mavenLocal()
|
||||
mavenCentral()
|
||||
maven { url "file:///Users/gburd/ws/helenus/lib" }
|
||||
maven { url "https://oss.sonatype.org/content/repositories/snapshots" }
|
||||
maven { url "http://repo.maven.apache.org/maven2" }
|
||||
}
|
||||
dependencies {
|
||||
compile group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.3.0'
|
||||
compile group: 'org.aspectj', name: 'aspectjrt', version: '1.8.10'
|
||||
compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
|
||||
compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
|
||||
compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'
|
||||
compile group: 'com.google.guava', name: 'guava', version: '20.0'
|
||||
compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
|
||||
compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'
|
||||
compile group: 'io.zipkin.brave', name: 'brave', version: '4.0.6'
|
||||
compile group: 'io.dropwizard.metrics', name: 'metrics-core', version: '3.2.2'
|
||||
compile group: 'javax.validation', name: 'validation-api', version: '2.0.0.CR3'
|
||||
compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.1'
|
||||
|
||||
runtime group: 'org.slf4j', name: 'jcl-over-slf4j', version: '1.7.1'
|
||||
|
||||
testCompile group: 'org.codehaus.jackson', name: 'jackson-mapper-asl', version: '1.9.13'
|
||||
testCompile group: 'com.anthemengineering.mojo', name: 'infer-maven-plugin', version: '0.1.0'
|
||||
testCompile group: 'org.codehaus.jackson', name: 'jackson-core-asl', version: '1.9.13'
|
||||
testCompile(group: 'org.cassandraunit', name: 'cassandra-unit', version: '3.1.4.0-SNAPSHOT') {
|
||||
exclude(module: 'cassandra-driver-core')
|
||||
}
|
||||
testCompile group: 'org.apache.cassandra', name: 'cassandra-all', version: '3.11.0'
|
||||
testCompile group: 'commons-io', name: 'commons-io', version: '2.5'
|
||||
testCompile group: 'junit', name: 'junit', version: '4.12'
|
||||
testCompile group: 'com.github.stephenc', name: 'jamm', version: '0.2.5'
|
||||
testCompile group: 'org.hamcrest', name: 'hamcrest-library', version: '1.3'
|
||||
testCompile group: 'org.hamcrest', name: 'hamcrest-core', version: '1.3'
|
||||
testCompile group: 'org.mockito', name: 'mockito-core', version: '2.8.47'
|
||||
}
|
648
dependencies.lock
Normal file
648
dependencies.lock
Normal file
|
@ -0,0 +1,648 @@
|
|||
{
|
||||
"compile": {
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "20.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"compileClasspath": {
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "20.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "20.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.slf4j:jcl-over-slf4j": {
|
||||
"locked": "1.7.1",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"runtime": {
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "20.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.slf4j:jcl-over-slf4j": {
|
||||
"locked": "1.7.1",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"runtimeClasspath": {
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "20.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.slf4j:jcl-over-slf4j": {
|
||||
"locked": "1.7.1",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"testCompile": {
|
||||
"com.anthemengineering.mojo:infer-maven-plugin": {
|
||||
"locked": "0.1.0",
|
||||
"requested": "0.1.0"
|
||||
},
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.github.stephenc:jamm": {
|
||||
"locked": "0.2.5",
|
||||
"requested": "0.2.5"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "21.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"commons-io:commons-io": {
|
||||
"locked": "2.5",
|
||||
"requested": "2.5"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"junit:junit": {
|
||||
"locked": "4.12",
|
||||
"requested": "4.12"
|
||||
},
|
||||
"org.apache.cassandra:cassandra-all": {
|
||||
"locked": "3.11.0",
|
||||
"requested": "3.11.0"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.cassandraunit:cassandra-unit": {
|
||||
"locked": "3.1.4.0-SNAPSHOT",
|
||||
"requested": "3.1.4.0-SNAPSHOT"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-core-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-mapper-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.hamcrest:hamcrest-core": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.hamcrest:hamcrest-library": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.mockito:mockito-core": {
|
||||
"locked": "2.8.47",
|
||||
"requested": "2.8.47"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"testCompileClasspath": {
|
||||
"com.anthemengineering.mojo:infer-maven-plugin": {
|
||||
"locked": "0.1.0",
|
||||
"requested": "0.1.0"
|
||||
},
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.github.stephenc:jamm": {
|
||||
"locked": "0.2.5",
|
||||
"requested": "0.2.5"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "21.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"commons-io:commons-io": {
|
||||
"locked": "2.5",
|
||||
"requested": "2.5"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"junit:junit": {
|
||||
"locked": "4.12",
|
||||
"requested": "4.12"
|
||||
},
|
||||
"org.apache.cassandra:cassandra-all": {
|
||||
"locked": "3.11.0",
|
||||
"requested": "3.11.0"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.cassandraunit:cassandra-unit": {
|
||||
"locked": "3.1.4.0-SNAPSHOT",
|
||||
"requested": "3.1.4.0-SNAPSHOT"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-core-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-mapper-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.hamcrest:hamcrest-core": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.hamcrest:hamcrest-library": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.mockito:mockito-core": {
|
||||
"locked": "2.8.47",
|
||||
"requested": "2.8.47"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"testRuntime": {
|
||||
"com.anthemengineering.mojo:infer-maven-plugin": {
|
||||
"locked": "0.1.0",
|
||||
"requested": "0.1.0"
|
||||
},
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.github.stephenc:jamm": {
|
||||
"locked": "0.2.5",
|
||||
"requested": "0.2.5"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "21.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"commons-io:commons-io": {
|
||||
"locked": "2.5",
|
||||
"requested": "2.5"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"junit:junit": {
|
||||
"locked": "4.12",
|
||||
"requested": "4.12"
|
||||
},
|
||||
"org.apache.cassandra:cassandra-all": {
|
||||
"locked": "3.11.0",
|
||||
"requested": "3.11.0"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.cassandraunit:cassandra-unit": {
|
||||
"locked": "3.1.4.0-SNAPSHOT",
|
||||
"requested": "3.1.4.0-SNAPSHOT"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-core-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-mapper-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.hamcrest:hamcrest-core": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.hamcrest:hamcrest-library": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.mockito:mockito-core": {
|
||||
"locked": "2.8.47",
|
||||
"requested": "2.8.47"
|
||||
},
|
||||
"org.slf4j:jcl-over-slf4j": {
|
||||
"locked": "1.7.7",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
},
|
||||
"testRuntimeClasspath": {
|
||||
"com.anthemengineering.mojo:infer-maven-plugin": {
|
||||
"locked": "0.1.0",
|
||||
"requested": "0.1.0"
|
||||
},
|
||||
"com.datastax.cassandra:cassandra-driver-core": {
|
||||
"locked": "3.3.0",
|
||||
"requested": "3.3.0"
|
||||
},
|
||||
"com.diffplug.durian:durian": {
|
||||
"locked": "3.5.0-SNAPSHOT",
|
||||
"requested": "3.+"
|
||||
},
|
||||
"com.github.stephenc:jamm": {
|
||||
"locked": "0.2.5",
|
||||
"requested": "0.2.5"
|
||||
},
|
||||
"com.google.guava:guava": {
|
||||
"locked": "21.0",
|
||||
"requested": "20.0"
|
||||
},
|
||||
"commons-io:commons-io": {
|
||||
"locked": "2.5",
|
||||
"requested": "2.5"
|
||||
},
|
||||
"io.dropwizard.metrics:metrics-core": {
|
||||
"locked": "3.2.2",
|
||||
"requested": "3.2.2"
|
||||
},
|
||||
"io.zipkin.brave:brave": {
|
||||
"locked": "4.0.6",
|
||||
"requested": "4.0.6"
|
||||
},
|
||||
"io.zipkin.java:zipkin": {
|
||||
"locked": "1.29.2",
|
||||
"requested": "1.29.2"
|
||||
},
|
||||
"javax.validation:validation-api": {
|
||||
"locked": "2.0.0.CR3",
|
||||
"requested": "2.0.0.CR3"
|
||||
},
|
||||
"junit:junit": {
|
||||
"locked": "4.12",
|
||||
"requested": "4.12"
|
||||
},
|
||||
"org.apache.cassandra:cassandra-all": {
|
||||
"locked": "3.11.0",
|
||||
"requested": "3.11.0"
|
||||
},
|
||||
"org.apache.commons:commons-lang3": {
|
||||
"locked": "3.6",
|
||||
"requested": "3.6"
|
||||
},
|
||||
"org.aspectj:aspectjrt": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.aspectj:aspectjweaver": {
|
||||
"locked": "1.8.10",
|
||||
"requested": "1.8.10"
|
||||
},
|
||||
"org.cassandraunit:cassandra-unit": {
|
||||
"locked": "3.1.4.0-SNAPSHOT",
|
||||
"requested": "3.1.4.0-SNAPSHOT"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-core-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.codehaus.jackson:jackson-mapper-asl": {
|
||||
"locked": "1.9.13",
|
||||
"requested": "1.9.13"
|
||||
},
|
||||
"org.hamcrest:hamcrest-core": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.hamcrest:hamcrest-library": {
|
||||
"locked": "1.3",
|
||||
"requested": "1.3"
|
||||
},
|
||||
"org.mockito:mockito-core": {
|
||||
"locked": "2.8.47",
|
||||
"requested": "2.8.47"
|
||||
},
|
||||
"org.slf4j:jcl-over-slf4j": {
|
||||
"locked": "1.7.7",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.slf4j:slf4j-api": {
|
||||
"locked": "1.7.25",
|
||||
"requested": "1.7.1"
|
||||
},
|
||||
"org.springframework:spring-core": {
|
||||
"locked": "4.3.10.RELEASE",
|
||||
"requested": "4.3.10.RELEASE"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -11,7 +11,7 @@
|
|||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.47.Final" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.47.Final" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.netty:netty-common:4.0.47.Final" level="project" />
|
||||
|
@ -28,14 +28,16 @@
|
|||
<orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-extras:3.3.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.diffplug.durian:durian:3.4.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.8.10" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.10.RELEASE" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.guava:guava:20.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.zipkin.java:zipkin:1.29.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.zipkin.brave:brave:4.0.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.zipkin.reporter:zipkin-reporter:0.6.12" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.0.CR3" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
|
||||
|
@ -114,15 +116,20 @@
|
|||
<orderEntry type="library" scope="TEST" name="Maven: org.caffinitas.ohc:ohc-core:0.4.4" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:caffeine:2.2.6" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.jctools:jctools-core:1.2.1" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: ca.exprofesso:guava-jcache:1.0.4" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: commons-io:commons-io:2.5" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: com.github.stephenc:jamm:0.2.5" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.8.47" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy:1.6.14" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy-agent:1.6.14" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.objenesis:objenesis:2.5" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:jcache:2.5.6" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: javax.cache:cache-api:1.0.0" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: com.typesafe:config:1.3.1" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: javax.inject:javax.inject:1" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.spy:spymemcached:2.12.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.1" level="project" />
|
||||
<orderEntry type="library" scope="RUNTIME" name="Maven: org.slf4j:jcl-over-slf4j:1.7.1" level="project" />
|
||||
</component>
|
||||
|
|
74
pom.xml
74
pom.xml
|
@ -109,13 +109,7 @@
|
|||
<dependency>
|
||||
<groupId>com.datastax.cassandra</groupId>
|
||||
<artifactId>cassandra-driver-core</artifactId>
|
||||
<version>3.3.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.datastax.cassandra</groupId>
|
||||
<artifactId>cassandra-driver-extras</artifactId>
|
||||
<version>3.3.2</version>
|
||||
<version>3.3.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
@ -124,6 +118,12 @@
|
|||
<version>3.4.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.aspectj</groupId>
|
||||
<artifactId>aspectjrt</artifactId>
|
||||
<version>1.8.10</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.aspectj</groupId>
|
||||
<artifactId>aspectjweaver</artifactId>
|
||||
|
@ -142,19 +142,25 @@
|
|||
<version>4.3.10.RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.cache</groupId>
|
||||
<artifactId>cache-api</artifactId>
|
||||
<version>1.1.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>20.0</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Metrics -->
|
||||
<!-- Metrics and tracing -->
|
||||
<dependency>
|
||||
<groupId>io.zipkin.java</groupId>
|
||||
<artifactId>zipkin</artifactId>
|
||||
<version>1.29.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.zipkin.brave</groupId>
|
||||
<artifactId>brave</artifactId>
|
||||
<version>4.0.6</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.dropwizard.metrics</groupId>
|
||||
<artifactId>metrics-core</artifactId>
|
||||
|
@ -211,24 +217,6 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>ca.exprofesso</groupId>
|
||||
<artifactId>guava-jcache</artifactId>
|
||||
<version>1.0.4</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>javax.cache</groupId>
|
||||
<artifactId>cache-api</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
|
@ -243,6 +231,13 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.github.stephenc</groupId>
|
||||
<artifactId>jamm</artifactId>
|
||||
<version>0.2.5</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hamcrest</groupId>
|
||||
<artifactId>hamcrest-library</artifactId>
|
||||
|
@ -264,6 +259,20 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Caching -->
|
||||
<dependency>
|
||||
<groupId>com.github.ben-manes.caffeine</groupId>
|
||||
<artifactId>jcache</artifactId>
|
||||
<version>2.5.6</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>net.spy</groupId>
|
||||
<artifactId>spymemcached</artifactId>
|
||||
<version>2.12.3</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Logging -->
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
|
@ -277,6 +286,7 @@
|
|||
<version>1.7.1</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
|
1
settings.gradle
Normal file
1
settings.gradle
Normal file
|
@ -0,0 +1 @@
|
|||
rootProject.name = 'helenus-core'
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,10 @@
|
|||
*/
|
||||
package com.datastax.driver.core.querybuilder;
|
||||
|
||||
import com.datastax.driver.core.CodecRegistry;
|
||||
import java.util.List;
|
||||
|
||||
import com.datastax.driver.core.CodecRegistry;
|
||||
|
||||
public class IsNotNullClause extends Clause {
|
||||
|
||||
final String name;
|
||||
|
|
|
@ -16,10 +16,8 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
CreateCustomIndex(String indexName) {
|
||||
super(indexName);
|
||||
validateNotEmpty(indexName, "Index name");
|
||||
validateNotKeyWord(
|
||||
indexName,
|
||||
String.format(
|
||||
"The index name '%s' is not allowed because it is a reserved keyword", indexName));
|
||||
validateNotKeyWord(indexName,
|
||||
String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
|
||||
this.indexName = indexName;
|
||||
}
|
||||
|
||||
|
@ -36,22 +34,20 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
/**
|
||||
* Specify the keyspace and table to create the index on.
|
||||
*
|
||||
* @param keyspaceName the keyspace name.
|
||||
* @param tableName the table name.
|
||||
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
|
||||
* @param keyspaceName
|
||||
* the keyspace name.
|
||||
* @param tableName
|
||||
* the table name.
|
||||
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
|
||||
* of the column.
|
||||
*/
|
||||
public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
|
||||
validateNotEmpty(keyspaceName, "Keyspace name");
|
||||
validateNotEmpty(tableName, "Table name");
|
||||
validateNotKeyWord(
|
||||
keyspaceName,
|
||||
String.format(
|
||||
"The keyspace name '%s' is not allowed because it is a reserved keyword",
|
||||
keyspaceName));
|
||||
validateNotKeyWord(
|
||||
tableName,
|
||||
String.format(
|
||||
"The table name '%s' is not allowed because it is a reserved keyword", tableName));
|
||||
validateNotKeyWord(keyspaceName,
|
||||
String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
|
||||
validateNotKeyWord(tableName,
|
||||
String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
|
||||
this.keyspaceName = Optional.fromNullable(keyspaceName);
|
||||
this.tableName = tableName;
|
||||
return new CreateCustomIndex.CreateIndexOn();
|
||||
|
@ -60,15 +56,15 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
/**
|
||||
* Specify the table to create the index on.
|
||||
*
|
||||
* @param tableName the table name.
|
||||
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
|
||||
* @param tableName
|
||||
* the table name.
|
||||
* @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
|
||||
* of the column.
|
||||
*/
|
||||
public CreateIndex.CreateIndexOn onTable(String tableName) {
|
||||
validateNotEmpty(tableName, "Table name");
|
||||
validateNotKeyWord(
|
||||
tableName,
|
||||
String.format(
|
||||
"The table name '%s' is not allowed because it is a reserved keyword", tableName));
|
||||
validateNotKeyWord(tableName,
|
||||
String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
|
||||
this.tableName = tableName;
|
||||
return new CreateCustomIndex.CreateIndexOn();
|
||||
}
|
||||
|
@ -83,8 +79,7 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
|
||||
@Override
|
||||
public String buildInternal() {
|
||||
StringBuilder createStatement =
|
||||
new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
|
||||
StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
|
||||
|
||||
if (ifNotExists) {
|
||||
createStatement.append("IF NOT EXISTS ");
|
||||
|
@ -122,15 +117,14 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
/**
|
||||
* Specify the column to create the index on.
|
||||
*
|
||||
* @param columnName the column name.
|
||||
* @param columnName
|
||||
* the column name.
|
||||
* @return the final CREATE INDEX statement.
|
||||
*/
|
||||
public SchemaStatement andColumn(String columnName) {
|
||||
validateNotEmpty(columnName, "Column name");
|
||||
validateNotKeyWord(
|
||||
columnName,
|
||||
String.format(
|
||||
"The column name '%s' is not allowed because it is a reserved keyword", columnName));
|
||||
validateNotKeyWord(columnName,
|
||||
String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
|
||||
CreateCustomIndex.this.columnName = columnName;
|
||||
return SchemaStatement.fromQueryString(buildInternal());
|
||||
}
|
||||
|
@ -138,15 +132,14 @@ public class CreateCustomIndex extends CreateIndex {
|
|||
/**
|
||||
* Create an index on the keys of the given map column.
|
||||
*
|
||||
* @param columnName the column name.
|
||||
* @param columnName
|
||||
* the column name.
|
||||
* @return the final CREATE INDEX statement.
|
||||
*/
|
||||
public SchemaStatement andKeysOfColumn(String columnName) {
|
||||
validateNotEmpty(columnName, "Column name");
|
||||
validateNotKeyWord(
|
||||
columnName,
|
||||
String.format(
|
||||
"The column name '%s' is not allowed because it is a reserved keyword", columnName));
|
||||
validateNotKeyWord(columnName,
|
||||
String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
|
||||
CreateCustomIndex.this.columnName = columnName;
|
||||
CreateCustomIndex.this.keys = true;
|
||||
return SchemaStatement.fromQueryString(buildInternal());
|
||||
|
|
|
@ -5,16 +5,12 @@ import com.datastax.driver.core.querybuilder.Select;
|
|||
|
||||
public class CreateMaterializedView extends Create {
|
||||
|
||||
private final String viewName;
|
||||
private String viewName;
|
||||
private Select.Where selection;
|
||||
private String primaryKey;
|
||||
private String clustering;
|
||||
|
||||
public CreateMaterializedView(
|
||||
String keyspaceName,
|
||||
String viewName,
|
||||
Select.Where selection,
|
||||
String primaryKey,
|
||||
public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
|
||||
String clustering) {
|
||||
super(keyspaceName, viewName);
|
||||
this.viewName = viewName;
|
||||
|
@ -28,8 +24,7 @@ public class CreateMaterializedView extends Create {
|
|||
}
|
||||
|
||||
public String buildInternal() {
|
||||
StringBuilder createStatement =
|
||||
new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
|
||||
StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
|
||||
if (ifNotExists) {
|
||||
createStatement.append(" IF NOT EXISTS");
|
||||
}
|
||||
|
|
|
@ -11,8 +11,7 @@ public class CreateSasiIndex extends CreateCustomIndex {
|
|||
}
|
||||
|
||||
String getOptions() {
|
||||
return "'analyzer_class': "
|
||||
+ "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
|
||||
return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
|
||||
+ "'case_sensitive': 'false'";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,10 +4,10 @@ import com.google.common.base.Optional;
|
|||
|
||||
public class DropMaterializedView extends Drop {
|
||||
|
||||
private final String itemType = "MATERIALIZED VIEW";
|
||||
private Optional<String> keyspaceName = Optional.absent();
|
||||
private String itemName;
|
||||
private boolean ifExists = true;
|
||||
|
||||
public DropMaterializedView(String keyspaceName, String viewName) {
|
||||
this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ public class DropMaterializedView extends Drop {
|
|||
|
||||
@Override
|
||||
public String buildInternal() {
|
||||
StringBuilder dropStatement = new StringBuilder("DROP MATERIALIZED VIEW ");
|
||||
StringBuilder dropStatement = new StringBuilder("DROP " + itemType + " ");
|
||||
if (ifExists) {
|
||||
dropStatement.append("IF EXISTS ");
|
||||
}
|
||||
|
@ -44,9 +44,6 @@ public class DropMaterializedView extends Drop {
|
|||
}
|
||||
|
||||
enum DroppedItem {
|
||||
TABLE,
|
||||
TYPE,
|
||||
INDEX,
|
||||
MATERIALIZED_VIEW
|
||||
TABLE, TYPE, INDEX, MATERIALIZED_VIEW
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,7 @@ package net.helenus.config;
|
|||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.core.DslInstantiator;
|
||||
import net.helenus.core.MapperInstantiator;
|
||||
import net.helenus.core.reflect.ReflectionDslInstantiator;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -19,6 +18,7 @@ package net.helenus.config;
|
|||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.mapping.annotation.Transient;
|
||||
|
||||
public enum GetterMethodDetector implements Function<Method, Boolean> {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,7 @@ package net.helenus.config;
|
|||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.core.DslInstantiator;
|
||||
import net.helenus.core.MapperInstantiator;
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@ package net.helenus.core;
|
|||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Date;
|
||||
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
|
||||
public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {
|
||||
|
@ -33,6 +34,6 @@ public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<
|
|||
}
|
||||
|
||||
public Date createdAt() {
|
||||
return get("createdAt", Date.class);
|
||||
return (Date) get("createdAt", Date.class);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,37 +1,26 @@
|
|||
package net.helenus.core;
|
||||
|
||||
import com.google.common.primitives.Primitives;
|
||||
import java.io.Serializable;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
import com.google.common.primitives.Primitives;
|
||||
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import org.apache.commons.lang3.SerializationUtils;
|
||||
|
||||
public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
||||
|
||||
private final Map<String, Object> backingMap = new HashMap<String, Object>();
|
||||
private final MapExportable entity;
|
||||
private final Map<String, Object> valuesMap;
|
||||
private final Set<String> readSet;
|
||||
private final Map<String, Object> mutationsMap = new HashMap<String, Object>();
|
||||
private final Map<String, Object> entityMap;
|
||||
|
||||
public AbstractEntityDraft(MapExportable entity) {
|
||||
this.entity = entity;
|
||||
// Entities can mutate their map.
|
||||
if (entity != null) {
|
||||
this.valuesMap = entity.toMap(true);
|
||||
this.readSet = entity.toReadSet();
|
||||
} else {
|
||||
this.valuesMap = new HashMap<String, Object>();
|
||||
this.readSet = new HashSet<String>();
|
||||
}
|
||||
this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>();
|
||||
}
|
||||
|
||||
public abstract Class<E> getEntityClass();
|
||||
|
@ -41,17 +30,16 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T get(Getter<T> getter, Class<?> returnType) {
|
||||
protected <T> T get(Getter<T> getter, Class<?> returnType) {
|
||||
return (T) get(this.<T>methodNameFor(getter), returnType);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T get(String key, Class<?> returnType) {
|
||||
readSet.add(key);
|
||||
T value = (T) mutationsMap.get(key);
|
||||
protected <T> T get(String key, Class<?> returnType) {
|
||||
T value = (T) backingMap.get(key);
|
||||
|
||||
if (value == null) {
|
||||
value = (T) valuesMap.get(key);
|
||||
value = (T) entityMap.get(key);
|
||||
if (value == null) {
|
||||
|
||||
if (Primitives.allPrimitiveTypes().contains(returnType)) {
|
||||
|
@ -63,65 +51,52 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
|
||||
return (T) type.getDefaultValue();
|
||||
}
|
||||
} else {
|
||||
// Collections fetched from the valuesMap
|
||||
if (value instanceof Collection) {
|
||||
value = (T) SerializationUtils.<Serializable>clone((Serializable) value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
public <T> Object set(Getter<T> getter, Object value) {
|
||||
HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
|
||||
String key = prop.getPropertyName();
|
||||
|
||||
HelenusValidator.INSTANCE.validate(prop, value);
|
||||
protected <T> Object set(Getter<T> getter, Object value) {
|
||||
return set(this.<T>methodNameFor(getter), value);
|
||||
}
|
||||
|
||||
protected Object set(String key, Object value) {
|
||||
if (key == null || value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
mutationsMap.put(key, value);
|
||||
backingMap.put(key, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
public Object set(String key, Object value) {
|
||||
if (key == null || value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
mutationsMap.put(key, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
public void put(String key, Object value) {
|
||||
mutationsMap.put(key, value);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T mutate(Getter<T> getter, T value) {
|
||||
protected <T> T mutate(Getter<T> getter, T value) {
|
||||
return (T) mutate(this.<T>methodNameFor(getter), value);
|
||||
}
|
||||
|
||||
public <T> T mutate(String key, T value) {
|
||||
protected Object mutate(String key, Object value) {
|
||||
Objects.requireNonNull(key);
|
||||
|
||||
if (value != null) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (entity != null) {
|
||||
T currentValue = this.<T>fetch(key);
|
||||
if (!value.equals(currentValue)) {
|
||||
mutationsMap.put(key, value);
|
||||
Map<String, Object> map = entity.toMap();
|
||||
|
||||
if (map.containsKey(key) && !value.equals(map.get(key))) {
|
||||
backingMap.put(key, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
return map.get(key);
|
||||
} else {
|
||||
mutationsMap.put(key, value);
|
||||
}
|
||||
}
|
||||
backingMap.put(key, value);
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private <T> String methodNameFor(Getter<T> getter) {
|
||||
return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();
|
||||
|
@ -133,8 +108,8 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
|
||||
public Object unset(String key) {
|
||||
if (key != null) {
|
||||
Object value = mutationsMap.get(key);
|
||||
mutationsMap.put(key, null);
|
||||
Object value = backingMap.get(key);
|
||||
backingMap.put(key, null);
|
||||
return value;
|
||||
}
|
||||
return null;
|
||||
|
@ -144,18 +119,10 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
|
||||
}
|
||||
|
||||
private <T> T fetch(String key) {
|
||||
T value = (T) mutationsMap.get(key);
|
||||
if (value == null) {
|
||||
value = (T) valuesMap.get(key);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
public <T> boolean reset(String key, T desiredValue) {
|
||||
if (key != null && desiredValue != null) {
|
||||
@SuppressWarnings("unchecked")
|
||||
T currentValue = (T) this.<T>fetch(key);
|
||||
T currentValue = (T) backingMap.get(key);
|
||||
if (currentValue == null || !currentValue.equals(desiredValue)) {
|
||||
set(key, desiredValue);
|
||||
return true;
|
||||
|
@ -166,37 +133,32 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
|
||||
@Override
|
||||
public Map<String, Object> toMap() {
|
||||
return toMap(valuesMap);
|
||||
return toMap(entityMap);
|
||||
}
|
||||
|
||||
public Map<String, Object> toMap(Map<String, Object> entityMap) {
|
||||
Map<String, Object> combined;
|
||||
if (entityMap != null && entityMap.size() > 0) {
|
||||
combined = new HashMap<String, Object>(entityMap.size());
|
||||
for (Map.Entry<String, Object> e : entityMap.entrySet()) {
|
||||
combined.put(e.getKey(), e.getValue());
|
||||
for (String key : entityMap.keySet()) {
|
||||
combined.put(key, entityMap.get(key));
|
||||
}
|
||||
} else {
|
||||
combined = new HashMap<String, Object>(mutationsMap.size());
|
||||
combined = new HashMap<String, Object>(backingMap.size());
|
||||
}
|
||||
for (String key : mutated()) {
|
||||
combined.put(key, mutationsMap.get(key));
|
||||
combined.put(key, backingMap.get(key));
|
||||
}
|
||||
return combined;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> mutated() {
|
||||
return mutationsMap.keySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> read() {
|
||||
return readSet;
|
||||
return backingMap.keySet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return mutationsMap.toString();
|
||||
return backingMap.toString();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,21 +15,26 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.io.PrintStream;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.collect.Table;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import java.io.PrintStream;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
import brave.Tracer;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.operation.Operation;
|
||||
import net.helenus.mapping.value.ColumnValuePreparer;
|
||||
import net.helenus.mapping.value.ColumnValueProvider;
|
||||
import net.helenus.support.Either;
|
||||
import net.helenus.support.HelenusException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public abstract class AbstractSessionOperations {
|
||||
|
||||
|
@ -42,8 +46,6 @@ public abstract class AbstractSessionOperations {
|
|||
|
||||
public abstract boolean isShowCql();
|
||||
|
||||
public abstract boolean showValues();
|
||||
|
||||
public abstract PrintStream getPrintStream();
|
||||
|
||||
public abstract Executor getExecutor();
|
||||
|
@ -60,6 +62,7 @@ public abstract class AbstractSessionOperations {
|
|||
|
||||
public PreparedStatement prepare(RegularStatement statement) {
|
||||
try {
|
||||
logStatement(statement, false);
|
||||
return currentSession().prepare(statement);
|
||||
} catch (RuntimeException e) {
|
||||
throw translateException(e);
|
||||
|
@ -68,53 +71,68 @@ public abstract class AbstractSessionOperations {
|
|||
|
||||
public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
|
||||
try {
|
||||
logStatement(statement, false);
|
||||
return currentSession().prepareAsync(statement);
|
||||
} catch (RuntimeException e) {
|
||||
throw translateException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public ResultSet execute(Statement statement) {
|
||||
return execute(statement, null, null);
|
||||
public ResultSet execute(Statement statement, boolean showValues) {
|
||||
return execute(statement, null, null, showValues);
|
||||
}
|
||||
|
||||
public ResultSet execute(Statement statement, Stopwatch timer) {
|
||||
return execute(statement, null, timer);
|
||||
public ResultSet execute(Statement statement, Stopwatch timer, boolean showValues) {
|
||||
return execute(statement, null, timer, showValues);
|
||||
}
|
||||
|
||||
public ResultSet execute(Statement statement, UnitOfWork uow) {
|
||||
return execute(statement, uow, null);
|
||||
public ResultSet execute(Statement statement, UnitOfWork uow, boolean showValues) {
|
||||
return execute(statement, uow, null, showValues);
|
||||
}
|
||||
|
||||
public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer) {
|
||||
return executeAsync(statement, uow, timer).getUninterruptibly();
|
||||
public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
|
||||
return executeAsync(statement, uow, timer, showValues).getUninterruptibly();
|
||||
}
|
||||
|
||||
public ResultSetFuture executeAsync(Statement statement) {
|
||||
return executeAsync(statement, null, null);
|
||||
public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
|
||||
return executeAsync(statement, null, null, showValues);
|
||||
}
|
||||
|
||||
public ResultSetFuture executeAsync(Statement statement, Stopwatch timer) {
|
||||
return executeAsync(statement, null, timer);
|
||||
public ResultSetFuture executeAsync(Statement statement, Stopwatch timer, boolean showValues) {
|
||||
return executeAsync(statement, null, timer, showValues);
|
||||
}
|
||||
|
||||
public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow) {
|
||||
return executeAsync(statement, uow, null);
|
||||
public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, boolean showValues) {
|
||||
return executeAsync(statement, uow, null, showValues);
|
||||
}
|
||||
|
||||
public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer) {
|
||||
public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
|
||||
try {
|
||||
logStatement(statement, showValues);
|
||||
return currentSession().executeAsync(statement);
|
||||
} catch (RuntimeException e) {
|
||||
throw translateException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private void logStatement(Statement statement, boolean showValues) {
|
||||
if (isShowCql()) {
|
||||
printCql(Operation.queryString(statement, showValues));
|
||||
} else if (LOG.isInfoEnabled()) {
|
||||
LOG.info("CQL> " + Operation.queryString(statement, showValues));
|
||||
}
|
||||
}
|
||||
|
||||
public Tracer getZipkinTracer() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public MetricRegistry getMetricRegistry() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {}
|
||||
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
|
||||
}
|
||||
|
||||
RuntimeException translateException(RuntimeException e) {
|
||||
if (e instanceof HelenusException) {
|
||||
|
@ -127,7 +145,13 @@ public abstract class AbstractSessionOperations {
|
|||
return null;
|
||||
}
|
||||
|
||||
public void updateCache(Object pojo, List<Facet> facets) {}
|
||||
public void updateCache(Object pojo, List<Facet> facets) {
|
||||
}
|
||||
|
||||
public void cacheEvict(List<Facet> facets) {}
|
||||
void printCql(String cql) {
|
||||
getPrintStream().println(cql);
|
||||
}
|
||||
|
||||
public void cacheEvict(List<Facet> facets) {
|
||||
}
|
||||
}
|
||||
|
|
354
src/main/java/net/helenus/core/AbstractUnitOfWork.java
Normal file
354
src/main/java/net/helenus/core/AbstractUnitOfWork.java
Normal file
|
@ -0,0 +1,354 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import static net.helenus.core.HelenusSession.deleted;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.diffplug.common.base.Errors;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.collect.HashBasedTable;
|
||||
import com.google.common.collect.Table;
|
||||
import com.google.common.collect.TreeTraverser;
|
||||
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.support.Either;
|
||||
|
||||
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
|
||||
public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);
|
||||
|
||||
private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
|
||||
private final HelenusSession session;
|
||||
private final AbstractUnitOfWork<E> parent;
|
||||
private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
|
||||
protected String purpose;
|
||||
protected int cacheHits = 0;
|
||||
protected int cacheMisses = 0;
|
||||
protected int databaseLookups = 0;
|
||||
protected Stopwatch elapsedTime;
|
||||
protected Map<String, Double> databaseTime = new HashMap<>();
|
||||
protected double cacheLookupTime = 0.0;
|
||||
private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
|
||||
private boolean aborted = false;
|
||||
private boolean committed = false;
|
||||
|
||||
protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
|
||||
Objects.requireNonNull(session, "containing session cannot be null");
|
||||
|
||||
this.session = session;
|
||||
this.parent = parent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDatabaseTime(String name, Stopwatch amount) {
|
||||
Double time = databaseTime.get(name);
|
||||
if (time == null) {
|
||||
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
|
||||
} else {
|
||||
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addCacheLookupTime(Stopwatch amount) {
|
||||
cacheLookupTime += amount.elapsed(TimeUnit.MICROSECONDS);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addNestedUnitOfWork(UnitOfWork<E> uow) {
|
||||
synchronized (nested) {
|
||||
nested.add((AbstractUnitOfWork<E>) uow);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized UnitOfWork<E> begin() {
|
||||
elapsedTime = Stopwatch.createStarted();
|
||||
// log.recordCacheAndDatabaseOperationCount(txn::start)
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnitOfWork setPurpose(String purpose) {
|
||||
this.purpose = purpose;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
|
||||
if (cache > 0) {
|
||||
cacheHits += cache;
|
||||
} else {
|
||||
cacheMisses += Math.abs(cache);
|
||||
}
|
||||
if (ops > 0) {
|
||||
databaseLookups += ops;
|
||||
}
|
||||
}
|
||||
|
||||
public String logTimers(String what) {
|
||||
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double d = 0.0;
|
||||
double c = cacheLookupTime / 1000.0;
|
||||
double fc = (c / e) * 100.0;
|
||||
String database = "";
|
||||
if (databaseTime.size() > 0) {
|
||||
List<String> dbt = new ArrayList<>(databaseTime.size());
|
||||
for (String name : databaseTime.keySet()) {
|
||||
double t = databaseTime.get(name) / 1000.0;
|
||||
d += t;
|
||||
dbt.add(String.format("%s took %,.3fms %,2.2f%%", name, t, (t / e) * 100.0));
|
||||
}
|
||||
double fd = (d / e) * 100.0;
|
||||
database = String.format(", %d quer%s (%,.3fms %,2.2f%% - %s)", databaseLookups,
|
||||
(databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
|
||||
}
|
||||
String cache = "";
|
||||
if (cacheLookupTime > 0) {
|
||||
int cacheLookups = cacheHits + cacheMisses;
|
||||
cache = String.format(" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)", cacheLookups,
|
||||
cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
|
||||
}
|
||||
String da = "";
|
||||
if (databaseTime.size() > 0 || cacheLookupTime > 0) {
|
||||
double dat = d + c;
|
||||
double daf = (dat / e) * 100;
|
||||
da = String.format(" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
|
||||
}
|
||||
String n = nested.stream().map(uow -> String.valueOf(uow.hashCode())).collect(Collectors.joining(", "));
|
||||
String s = String.format(Locale.US, "UOW(%s%s) %s in %,.3fms%s%s%s%s", hashCode(),
|
||||
(nested.size() > 0 ? ", [" + n + "]" : ""), what, e, cache, database, da,
|
||||
(purpose == null ? "" : " " + purpose));
|
||||
return s;
|
||||
}
|
||||
|
||||
private void applyPostCommitFunctions() {
|
||||
if (!postCommit.isEmpty()) {
|
||||
for (CommitThunk f : postCommit) {
|
||||
f.apply();
|
||||
}
|
||||
}
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(logTimers("committed"));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Object> cacheLookup(List<Facet> facets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Optional<Object> result = Optional.empty();
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
|
||||
if (eitherValue != null) {
|
||||
Object value = deleted;
|
||||
if (eitherValue.isLeft()) {
|
||||
value = eitherValue.getLeft();
|
||||
}
|
||||
result = Optional.of(value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!result.isPresent()) {
|
||||
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
|
||||
if (parent != null) {
|
||||
return parent.cacheLookup(facets);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> cacheEvict(List<Facet> facets) {
|
||||
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Optional<Object> optionalValue = cacheLookup(facets);
|
||||
if (optionalValue.isPresent()) {
|
||||
Object value = optionalValue.get();
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnKey = facet.name() + "==" + facet.value();
|
||||
// mark the value identified by the facet to `deleted`
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
}
|
||||
}
|
||||
// look for other row/col pairs that referenced the same object, mark them
|
||||
// `deleted`
|
||||
cache.columnKeySet().forEach(columnKey -> {
|
||||
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
|
||||
if (eitherCachedValue.isLeft()) {
|
||||
Object cachedValue = eitherCachedValue.getLeft();
|
||||
if (cachedValue == value) {
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
String[] parts = columnKey.split("==");
|
||||
facets.add(new Facet<String>(parts[0], parts[1]));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return facets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cacheUpdate(Object value, List<Facet> facets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
cache.put(tableName, columnName, Either.left(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
|
||||
return nested.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to see if the work performed between calling begin and now can be
|
||||
* committed or not.
|
||||
*
|
||||
* @return a function from which to chain work that only happens when commit is
|
||||
* successful
|
||||
* @throws E
|
||||
* when the work overlaps with other concurrent writers.
|
||||
*/
|
||||
public PostCommitFunction<Void, Void> commit() throws E {
|
||||
// All nested UnitOfWork should be committed (not aborted) before calls to
|
||||
// commit, check.
|
||||
boolean canCommit = true;
|
||||
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
|
||||
for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
|
||||
if (this != uow) {
|
||||
canCommit &= (!uow.aborted && uow.committed);
|
||||
}
|
||||
}
|
||||
|
||||
// log.recordCacheAndDatabaseOperationCount(txn::provisionalCommit)
|
||||
// examine log for conflicts in read-set and write-set between begin and
|
||||
// provisional commit
|
||||
// if (conflict) { throw new ConflictingUnitOfWorkException(this) }
|
||||
// else return function so as to enable commit.andThen(() -> { do something iff
|
||||
// commit was successful; })
|
||||
|
||||
if (canCommit) {
|
||||
committed = true;
|
||||
aborted = false;
|
||||
|
||||
nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
|
||||
elapsedTime.stop();
|
||||
|
||||
if (parent == null) {
|
||||
// Apply all post-commit functions, this is the outter-most UnitOfWork.
|
||||
traverser.postOrderTraversal(this).forEach(uow -> {
|
||||
uow.applyPostCommitFunctions();
|
||||
});
|
||||
|
||||
// Merge our cache into the session cache.
|
||||
session.mergeCache(cache);
|
||||
|
||||
return new PostCommitFunction(this, null);
|
||||
} else {
|
||||
|
||||
// Merge cache and statistics into parent if there is one.
|
||||
parent.mergeCache(cache);
|
||||
|
||||
parent.cacheHits += cacheHits;
|
||||
parent.cacheMisses += cacheMisses;
|
||||
parent.databaseLookups += databaseLookups;
|
||||
parent.cacheLookupTime += cacheLookupTime;
|
||||
for (String name : databaseTime.keySet()) {
|
||||
if (parent.databaseTime.containsKey(name)) {
|
||||
double t = parent.databaseTime.get(name);
|
||||
parent.databaseTime.put(name, t + databaseTime.get(name));
|
||||
} else {
|
||||
parent.databaseTime.put(name, databaseTime.get(name));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// else {
|
||||
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
|
||||
// T object = ctor.newInstance(new Object[] { String message });
|
||||
// }
|
||||
return new PostCommitFunction(this, postCommit);
|
||||
}
|
||||
|
||||
/* Explicitly discard the work and mark it as as such in the log. */
|
||||
public synchronized void abort() {
|
||||
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
|
||||
traverser.postOrderTraversal(this).forEach(uow -> {
|
||||
uow.committed = false;
|
||||
uow.aborted = true;
|
||||
});
|
||||
// log.recordCacheAndDatabaseOperationCount(txn::abort)
|
||||
// cache.invalidateSince(txn::start time)
|
||||
if (!hasAborted()) {
|
||||
elapsedTime.stop();
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(logTimers("aborted"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
|
||||
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
|
||||
from.rowMap().forEach((rowKey, columnMap) -> {
|
||||
columnMap.forEach((columnKey, value) -> {
|
||||
if (to.contains(rowKey, columnKey)) {
|
||||
// TODO(gburd):...
|
||||
to.put(rowKey, columnKey, Either.left(CacheUtil.merge(to.get(rowKey, columnKey).getLeft(),
|
||||
from.get(rowKey, columnKey).getLeft())));
|
||||
} else {
|
||||
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
public String describeConflicts() {
|
||||
return "it's complex...";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws E {
|
||||
// Closing a AbstractUnitOfWork will abort iff we've not already aborted or
|
||||
// committed this unit of work.
|
||||
if (aborted == false && committed == false) {
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean hasAborted() {
|
||||
return aborted;
|
||||
}
|
||||
|
||||
public boolean hasCommitted() {
|
||||
return committed;
|
||||
}
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,8 +16,5 @@
|
|||
package net.helenus.core;
|
||||
|
||||
public enum AutoDdl {
|
||||
VALIDATE,
|
||||
UPDATE,
|
||||
CREATE,
|
||||
CREATE_DROP;
|
||||
VALIDATE, UPDATE, CREATE, CREATE_DROP;
|
||||
}
|
||||
|
|
6
src/main/java/net/helenus/core/CommitThunk.java
Normal file
6
src/main/java/net/helenus/core/CommitThunk.java
Normal file
|
@ -0,0 +1,6 @@
|
|||
package net.helenus.core;
|
||||
|
||||
@FunctionalInterface
|
||||
public interface CommitThunk {
|
||||
void apply();
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,13 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.datastax.driver.core.Metadata;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.Metadata;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
|
||||
public interface DslInstantiator {
|
||||
|
||||
<E> E instantiate(
|
||||
Class<E> iface,
|
||||
ClassLoader classLoader,
|
||||
Optional<HelenusPropertyNode> parent,
|
||||
Metadata metadata);
|
||||
<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,8 +15,10 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.datastax.driver.core.querybuilder.Clause;
|
||||
import java.util.Objects;
|
||||
|
||||
import com.datastax.driver.core.querybuilder.Clause;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.value.ColumnValuePreparer;
|
||||
|
@ -80,21 +81,13 @@ public final class Filter<V> {
|
|||
return new Filter<V>(node, postulate);
|
||||
}
|
||||
|
||||
public static <V> Filter<V> create(
|
||||
Getter<V> getter, HelenusPropertyNode node, Postulate<V> postulate) {
|
||||
Objects.requireNonNull(getter, "empty getter");
|
||||
Objects.requireNonNull(postulate, "empty operator");
|
||||
return new Filter<V>(node, postulate);
|
||||
}
|
||||
|
||||
public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
|
||||
Objects.requireNonNull(getter, "empty getter");
|
||||
Objects.requireNonNull(op, "empty op");
|
||||
Objects.requireNonNull(val, "empty value");
|
||||
|
||||
if (op == Operator.IN) {
|
||||
throw new IllegalArgumentException(
|
||||
"invalid usage of the 'in' operator, use Filter.in() static method");
|
||||
throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
|
||||
}
|
||||
|
||||
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,17 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.datastax.driver.core.Cluster;
|
||||
import com.datastax.driver.core.Metadata;
|
||||
import com.datastax.driver.core.Session;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import com.datastax.driver.core.Cluster;
|
||||
import com.datastax.driver.core.Metadata;
|
||||
import com.datastax.driver.core.Session;
|
||||
|
||||
import net.helenus.config.DefaultHelenusSettings;
|
||||
import net.helenus.config.HelenusSettings;
|
||||
import net.helenus.core.reflect.DslExportable;
|
||||
|
@ -34,15 +35,14 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class Helenus {
|
||||
|
||||
private static final ConcurrentMap<Class<?>, Object> dslCache =
|
||||
new ConcurrentHashMap<Class<?>, Object>();
|
||||
private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
|
||||
new ConcurrentHashMap<Class<?>, Metadata>();
|
||||
private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
|
||||
private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
|
||||
private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
|
||||
private static volatile HelenusSettings settings = new DefaultHelenusSettings();
|
||||
private static volatile HelenusSession singleton;
|
||||
|
||||
private Helenus() {}
|
||||
private Helenus() {
|
||||
}
|
||||
|
||||
protected static void setSession(HelenusSession session) {
|
||||
sessions.add(session);
|
||||
|
@ -54,8 +54,7 @@ public final class Helenus {
|
|||
}
|
||||
|
||||
public static void shutdown() {
|
||||
sessions.forEach(
|
||||
(session) -> {
|
||||
sessions.forEach((session) -> {
|
||||
session.close();
|
||||
sessions.remove(session);
|
||||
});
|
||||
|
@ -82,10 +81,6 @@ public final class Helenus {
|
|||
return new SessionInitializer(session);
|
||||
}
|
||||
|
||||
public static SessionInitializer init(Session session, String keyspace) {
|
||||
return new SessionInitializer(session, keyspace);
|
||||
}
|
||||
|
||||
public static SessionInitializer init(Session session) {
|
||||
|
||||
if (session == null) {
|
||||
|
@ -111,10 +106,7 @@ public final class Helenus {
|
|||
return dsl(iface, classLoader, Optional.empty(), metadata);
|
||||
}
|
||||
|
||||
public static <E> E dsl(
|
||||
Class<E> iface,
|
||||
ClassLoader classLoader,
|
||||
Optional<HelenusPropertyNode> parent,
|
||||
public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
|
||||
Metadata metadata) {
|
||||
|
||||
Object instance = null;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,17 +17,27 @@ package net.helenus.core;
|
|||
|
||||
import static net.helenus.core.Query.eq;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.collect.Table;
|
||||
import java.io.Closeable;
|
||||
import java.io.PrintStream;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.cache.Cache;
|
||||
import javax.cache.CacheManager;
|
||||
|
||||
import net.helenus.core.cache.SessionCache;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.cache.Cache;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.collect.Table;
|
||||
|
||||
import brave.Tracer;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
|
@ -45,68 +54,58 @@ import net.helenus.support.Fun.Tuple1;
|
|||
import net.helenus.support.Fun.Tuple2;
|
||||
import net.helenus.support.Fun.Tuple6;
|
||||
|
||||
public class HelenusSession extends AbstractSessionOperations implements Closeable {
|
||||
public final class HelenusSession extends AbstractSessionOperations implements Closeable {
|
||||
|
||||
public static final Object deleted = new Object();
|
||||
private static final Logger LOG = LoggerFactory.getLogger(HelenusSession.class);
|
||||
|
||||
private final Session session;
|
||||
private final CodecRegistry registry;
|
||||
private final ConsistencyLevel defaultConsistencyLevel;
|
||||
private final boolean defaultQueryIdempotency;
|
||||
private final MetricRegistry metricRegistry;
|
||||
private final Tracer zipkinTracer;
|
||||
private final PrintStream printStream;
|
||||
private final Class<? extends UnitOfWork> unitOfWorkClass;
|
||||
private final SessionRepository sessionRepository;
|
||||
private final Executor executor;
|
||||
private final boolean dropSchemaOnClose;
|
||||
private final CacheManager cacheManager;
|
||||
private final SessionCache<String, Object> sessionCache;
|
||||
private final RowColumnValueProvider valueProvider;
|
||||
private final StatementColumnValuePreparer valuePreparer;
|
||||
private final Metadata metadata;
|
||||
private volatile String usingKeyspace;
|
||||
private volatile boolean showCql;
|
||||
private volatile boolean showValues;
|
||||
|
||||
HelenusSession(
|
||||
Session session,
|
||||
String usingKeyspace,
|
||||
CodecRegistry registry,
|
||||
boolean showCql,
|
||||
boolean showValues,
|
||||
PrintStream printStream,
|
||||
SessionRepositoryBuilder sessionRepositoryBuilder,
|
||||
Executor executor,
|
||||
boolean dropSchemaOnClose,
|
||||
ConsistencyLevel consistencyLevel,
|
||||
boolean defaultQueryIdempotency,
|
||||
CacheManager cacheManager,
|
||||
MetricRegistry metricRegistry) {
|
||||
HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
|
||||
PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
|
||||
boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, boolean defaultQueryIdempotency,
|
||||
Class<? extends UnitOfWork> unitOfWorkClass, SessionCache sessionCache,
|
||||
MetricRegistry metricRegistry, Tracer tracer) {
|
||||
this.session = session;
|
||||
this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
|
||||
this.usingKeyspace =
|
||||
Objects.requireNonNull(
|
||||
usingKeyspace, "keyspace needs to be selected before creating session");
|
||||
this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
|
||||
"keyspace needs to be selected before creating session");
|
||||
this.showCql = showCql;
|
||||
this.showValues = showValues;
|
||||
this.printStream = printStream;
|
||||
this.sessionRepository =
|
||||
sessionRepositoryBuilder == null ? null : sessionRepositoryBuilder.build();
|
||||
this.sessionRepository = sessionRepositoryBuilder.build();
|
||||
this.executor = executor;
|
||||
this.dropSchemaOnClose = dropSchemaOnClose;
|
||||
this.defaultConsistencyLevel = consistencyLevel;
|
||||
this.defaultQueryIdempotency = defaultQueryIdempotency;
|
||||
this.unitOfWorkClass = unitOfWorkClass;
|
||||
this.metricRegistry = metricRegistry;
|
||||
this.cacheManager = cacheManager;
|
||||
this.zipkinTracer = tracer;
|
||||
|
||||
if (sessionCache == null) {
|
||||
this.sessionCache = SessionCache.<String, Object>defaultCache();
|
||||
} else {
|
||||
this.sessionCache = sessionCache;
|
||||
}
|
||||
|
||||
this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
|
||||
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
|
||||
this.metadata = session == null ? null : session.getCluster().getMetadata();
|
||||
}
|
||||
|
||||
public UnitOfWork begin() {
|
||||
return new UnitOfWork(this).begin();
|
||||
}
|
||||
|
||||
public UnitOfWork begin(UnitOfWork parent) {
|
||||
return new UnitOfWork(this, parent).begin();
|
||||
this.metadata = session.getCluster().getMetadata();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -145,20 +144,6 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
return this;
|
||||
}
|
||||
|
||||
public HelenusSession showQueryValuesInLog(boolean showValues) {
|
||||
this.showValues = showValues;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HelenusSession showQueryValuesInLog() {
|
||||
this.showValues = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean showValues() {
|
||||
return showValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Executor getExecutor() {
|
||||
return executor;
|
||||
|
@ -179,6 +164,11 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
return valuePreparer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Tracer getZipkinTracer() {
|
||||
return zipkinTracer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricRegistry getMetricRegistry() {
|
||||
return metricRegistry;
|
||||
|
@ -196,36 +186,31 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
|
||||
@Override
|
||||
public Object checkCache(String tableName, List<Facet> facets) {
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
|
||||
Object result = null;
|
||||
if (cacheManager != null) {
|
||||
Cache<String, Object> cache = cacheManager.getCache(tableName);
|
||||
if (cache != null) {
|
||||
for (String key : CacheUtil.flatKeys(tableName, facets)) {
|
||||
result = cache.get(key);
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
result = sessionCache.getIfPresent(cacheKey);
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cacheEvict(List<Facet> facets) {
|
||||
if (cacheManager != null) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Cache<String, Object> cache = cacheManager.getCache(tableName);
|
||||
if (cache != null) {
|
||||
CacheUtil.flatKeys(tableName, facets).forEach(key -> cache.remove(key));
|
||||
}
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
sessionCache.invalidate(cacheKey);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updateCache(Object pojo, List<Facet> facets) {
|
||||
Map<String, Object> valueMap =
|
||||
pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
for (Facet facet : facets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
|
@ -240,7 +225,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
} else {
|
||||
value = valueMap.get(prop.getPropertyName());
|
||||
if (value != null) binder.setValueForProperty(prop, value.toString());
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
}
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
|
@ -252,43 +237,38 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
|
||||
replaceCachedFacetValues(pojo, tableName, facetCombinations);
|
||||
mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
|
||||
if (cacheManager != null) {
|
||||
List<Object> items =
|
||||
uowCache
|
||||
.values()
|
||||
.stream()
|
||||
.filter(Either::isLeft)
|
||||
.map(Either::getLeft)
|
||||
.distinct()
|
||||
.collect(Collectors.toList());
|
||||
for (Object pojo : items) {
|
||||
List<Either<Object, List<Facet>>> items = uowCache.values().stream().distinct().collect(Collectors.toList());
|
||||
for (Either<Object, List<Facet>> item : items) {
|
||||
if (item.isRight()) {
|
||||
List<Facet> facets = item.getRight();
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
List<String[]> combinations = CacheUtil.flattenFacets(facets);
|
||||
for (String[] combination : combinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
sessionCache.invalidate(cacheKey);
|
||||
}
|
||||
} else {
|
||||
Object pojo = item.getLeft();
|
||||
HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
|
||||
Map<String, Object> valueMap =
|
||||
pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
if (entity.isCacheable()) {
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
for (Facet facet : entity.getFacets()) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
unboundFacet
|
||||
.getProperties()
|
||||
.forEach(
|
||||
prop -> {
|
||||
unboundFacet.getProperties().forEach(prop -> {
|
||||
if (valueMap == null) {
|
||||
Object value =
|
||||
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop,
|
||||
false);
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
} else {
|
||||
Object v = valueMap.get(prop.getPropertyName());
|
||||
if (v != null) {
|
||||
binder.setValueForProperty(prop, v.toString());
|
||||
}
|
||||
binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
|
||||
}
|
||||
});
|
||||
if (binder.isBound()) {
|
||||
|
@ -298,66 +278,78 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
// NOTE: should equal `String tableName = CacheUtil.schemaName(facets);`
|
||||
List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
|
||||
String tableName = CacheUtil.schemaName(boundFacets);
|
||||
replaceCachedFacetValues(pojo, tableName, facetCombinations);
|
||||
}
|
||||
}
|
||||
|
||||
List<List<Facet>> deletedFacetSets =
|
||||
uowCache
|
||||
.values()
|
||||
.stream()
|
||||
.filter(Either::isRight)
|
||||
.map(Either::getRight)
|
||||
.collect(Collectors.toList());
|
||||
for (List<Facet> facets : deletedFacetSets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Cache<String, Object> cache = cacheManager.getCache(tableName);
|
||||
if (cache != null) {
|
||||
List<String> keys = CacheUtil.flatKeys(tableName, facets);
|
||||
keys.forEach(key -> cache.remove(key));
|
||||
mergeAndUpdateCacheValues(pojo, tableName, facetCombinations);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void replaceCachedFacetValues(
|
||||
Object pojo, String tableName, List<String[]> facetCombinations) {
|
||||
if (cacheManager != null) {
|
||||
private void mergeAndUpdateCacheValues(Object pojo, String tableName, List<String[]> facetCombinations) {
|
||||
Object merged = null;
|
||||
for (String[] combination : facetCombinations) {
|
||||
String cacheKey = tableName + "." + Arrays.toString(combination);
|
||||
Cache<String, Object> cache = cacheManager.getCache(tableName);
|
||||
if (cache != null) {
|
||||
if (pojo == null || pojo == HelenusSession.deleted) {
|
||||
cache.remove(cacheKey);
|
||||
Object value = sessionCache.get(cacheKey);
|
||||
if (value == null) {
|
||||
sessionCache.put(cacheKey, pojo);
|
||||
} else {
|
||||
cache.put(cacheKey, pojo);
|
||||
if (merged == null) {
|
||||
merged = pojo;
|
||||
} else {
|
||||
merged = CacheUtil.merge(value, pojo);
|
||||
}
|
||||
sessionCache.put(cacheKey, merged);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public CacheManager getCacheManager() {
|
||||
return cacheManager;
|
||||
}
|
||||
|
||||
public Metadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public UnitOfWork begin() {
|
||||
return this.begin(null);
|
||||
}
|
||||
|
||||
public synchronized UnitOfWork begin(UnitOfWork parent) {
|
||||
StringBuilder purpose = null;
|
||||
if (LOG.isInfoEnabled()) {
|
||||
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
|
||||
int frame = 2;
|
||||
if (trace[2].getMethodName().equals("begin")) {
|
||||
frame = 3;
|
||||
}
|
||||
purpose = new StringBuilder().append(trace[frame].getClassName()).append(".")
|
||||
.append(trace[frame].getMethodName()).append("(").append(trace[frame].getFileName()).append(":")
|
||||
.append(trace[frame].getLineNumber()).append(")");
|
||||
}
|
||||
try {
|
||||
Class<? extends UnitOfWork> clazz = unitOfWorkClass;
|
||||
Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
|
||||
UnitOfWork uow = ctor.newInstance(this, parent);
|
||||
if (LOG.isInfoEnabled() && purpose != null) {
|
||||
uow.setPurpose(purpose.toString());
|
||||
}
|
||||
if (parent != null) {
|
||||
parent.addNestedUnitOfWork(uow);
|
||||
}
|
||||
return uow.begin();
|
||||
} catch (NoSuchMethodException | InvocationTargetException | InstantiationException
|
||||
| IllegalAccessException e) {
|
||||
throw new HelenusException(
|
||||
String.format("Unable to instantiate {} as a UnitOfWork.", unitOfWorkClass.getSimpleName()), e);
|
||||
}
|
||||
}
|
||||
|
||||
public <E> SelectOperation<E> select(E pojo) {
|
||||
Objects.requireNonNull(
|
||||
pojo, "supplied object must be a dsl for a registered entity but cannot be null");
|
||||
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
|
||||
ColumnValueProvider valueProvider = getValueProvider();
|
||||
HelenusEntity entity = Helenus.resolve(pojo);
|
||||
Class<?> entityClass = entity.getMappingInterface();
|
||||
|
||||
return new SelectOperation<E>(
|
||||
this,
|
||||
entity,
|
||||
(r) -> {
|
||||
return new SelectOperation<E>(this, entity, (r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
|
||||
return (E) Helenus.map(entityClass, map);
|
||||
});
|
||||
|
@ -368,10 +360,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
ColumnValueProvider valueProvider = getValueProvider();
|
||||
HelenusEntity entity = Helenus.entity(entityClass);
|
||||
|
||||
return new SelectOperation<E>(
|
||||
this,
|
||||
entity,
|
||||
(r) -> {
|
||||
return new SelectOperation<E>(this, entity, (r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
|
||||
return (E) Helenus.map(entityClass, map);
|
||||
});
|
||||
|
@ -381,22 +370,13 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
return new SelectOperation<Fun.ArrayTuple>(this);
|
||||
}
|
||||
|
||||
public <E> SelectOperation<E> selectAll(Class<E> entityClass) {
|
||||
public SelectOperation<Row> selectAll(Class<?> entityClass) {
|
||||
Objects.requireNonNull(entityClass, "entityClass is empty");
|
||||
HelenusEntity entity = Helenus.entity(entityClass);
|
||||
|
||||
return new SelectOperation<E>(
|
||||
this,
|
||||
entity,
|
||||
(r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
|
||||
return (E) Helenus.map(entityClass, map);
|
||||
});
|
||||
return new SelectOperation<Row>(this, Helenus.entity(entityClass));
|
||||
}
|
||||
|
||||
public <E> SelectOperation<Row> selectAll(E pojo) {
|
||||
Objects.requireNonNull(
|
||||
pojo, "supplied object must be a dsl for a registered entity but cannot be null");
|
||||
Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
|
||||
HelenusEntity entity = Helenus.resolve(pojo);
|
||||
return new SelectOperation<Row>(this, entity);
|
||||
}
|
||||
|
@ -411,8 +391,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
|
||||
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
|
||||
return new SelectOperation<Tuple1<V1>>(
|
||||
this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
|
||||
return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
|
||||
}
|
||||
|
||||
public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {
|
||||
|
@ -421,12 +400,12 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
|
||||
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
|
||||
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
|
||||
return new SelectOperation<Fun.Tuple2<V1, V2>>(
|
||||
this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
|
||||
return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
|
||||
p1, p2);
|
||||
}
|
||||
|
||||
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
|
||||
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
|
||||
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
|
||||
Getter<V3> getter3) {
|
||||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
Objects.requireNonNull(getter2, "field 2 is empty");
|
||||
Objects.requireNonNull(getter3, "field 3 is empty");
|
||||
|
@ -434,12 +413,12 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
|
||||
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
|
||||
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
|
||||
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(
|
||||
this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
|
||||
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
|
||||
new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
|
||||
}
|
||||
|
||||
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
|
||||
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
|
||||
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
|
||||
Getter<V3> getter3, Getter<V4> getter4) {
|
||||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
Objects.requireNonNull(getter2, "field 2 is empty");
|
||||
Objects.requireNonNull(getter3, "field 3 is empty");
|
||||
|
@ -449,21 +428,12 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
|
||||
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
|
||||
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
|
||||
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(
|
||||
this,
|
||||
new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4),
|
||||
p1,
|
||||
p2,
|
||||
p3,
|
||||
p4);
|
||||
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
|
||||
new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
|
||||
}
|
||||
|
||||
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
|
||||
Getter<V1> getter1,
|
||||
Getter<V2> getter2,
|
||||
Getter<V3> getter3,
|
||||
Getter<V4> getter4,
|
||||
Getter<V5> getter5) {
|
||||
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
|
||||
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
|
||||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
Objects.requireNonNull(getter2, "field 2 is empty");
|
||||
Objects.requireNonNull(getter3, "field 3 is empty");
|
||||
|
@ -475,23 +445,12 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
|
||||
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
|
||||
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
|
||||
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(
|
||||
this,
|
||||
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
|
||||
p1,
|
||||
p2,
|
||||
p3,
|
||||
p4,
|
||||
p5);
|
||||
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
|
||||
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
|
||||
}
|
||||
|
||||
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
|
||||
Getter<V1> getter1,
|
||||
Getter<V2> getter2,
|
||||
Getter<V3> getter3,
|
||||
Getter<V4> getter4,
|
||||
Getter<V5> getter5,
|
||||
Getter<V6> getter6) {
|
||||
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1,
|
||||
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
|
||||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
Objects.requireNonNull(getter2, "field 2 is empty");
|
||||
Objects.requireNonNull(getter3, "field 3 is empty");
|
||||
|
@ -505,26 +464,14 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
|
||||
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
|
||||
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
|
||||
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(
|
||||
this,
|
||||
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
|
||||
p1,
|
||||
p2,
|
||||
p3,
|
||||
p4,
|
||||
p5,
|
||||
p6);
|
||||
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
|
||||
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4,
|
||||
p5, p6);
|
||||
}
|
||||
|
||||
public <V1, V2, V3, V4, V5, V6, V7>
|
||||
SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
|
||||
Getter<V1> getter1,
|
||||
Getter<V2> getter2,
|
||||
Getter<V3> getter3,
|
||||
Getter<V4> getter4,
|
||||
Getter<V5> getter5,
|
||||
Getter<V6> getter6,
|
||||
Getter<V7> getter7) {
|
||||
public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
|
||||
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5,
|
||||
Getter<V6> getter6, Getter<V7> getter7) {
|
||||
Objects.requireNonNull(getter1, "field 1 is empty");
|
||||
Objects.requireNonNull(getter2, "field 2 is empty");
|
||||
Objects.requireNonNull(getter3, "field 3 is empty");
|
||||
|
@ -540,17 +487,9 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
|
||||
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
|
||||
HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
|
||||
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(
|
||||
this,
|
||||
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
|
||||
getValueProvider(), p1, p2, p3, p4, p5, p6, p7),
|
||||
p1,
|
||||
p2,
|
||||
p3,
|
||||
p4,
|
||||
p5,
|
||||
p6,
|
||||
p7);
|
||||
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
|
||||
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2,
|
||||
p3, p4, p5, p6, p7);
|
||||
}
|
||||
|
||||
public CountOperation count() {
|
||||
|
@ -566,14 +505,6 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
return new UpdateOperation<ResultSet>(this);
|
||||
}
|
||||
|
||||
public <E> UpdateOperation<E> update(Object pojo) {
|
||||
if (pojo instanceof MapExportable == false) {
|
||||
throw new HelenusMappingException(
|
||||
"update of objects that don't implement MapExportable is not yet supported");
|
||||
}
|
||||
return new UpdateOperation<E>(this, pojo);
|
||||
}
|
||||
|
||||
public <E> UpdateOperation<E> update(Drafted<E> drafted) {
|
||||
if (drafted instanceof AbstractEntityDraft == false) {
|
||||
throw new HelenusMappingException(
|
||||
|
@ -586,24 +517,19 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
HelenusEntity entity = Helenus.entity(draft.getEntityClass());
|
||||
|
||||
// Add all the mutated values contained in the draft.
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.forEach(
|
||||
property -> {
|
||||
entity.getOrderedProperties().forEach(property -> {
|
||||
switch (property.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
break;
|
||||
default:
|
||||
default :
|
||||
String propertyName = property.getPropertyName();
|
||||
if (mutatedProperties.contains(propertyName)) {
|
||||
Object value = map.get(propertyName);
|
||||
Getter<Object> getter =
|
||||
new Getter<Object>() {
|
||||
Getter<Object> getter = new Getter<Object>() {
|
||||
@Override
|
||||
public Object get() {
|
||||
throw new DslPropertyException(
|
||||
new HelenusPropertyNode(property, Optional.empty()));
|
||||
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
|
||||
}
|
||||
};
|
||||
update.set(getter, value);
|
||||
|
@ -613,21 +539,16 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
|
||||
// Add the partition and clustering keys if they were in the draft (normally the
|
||||
// case).
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.forEach(
|
||||
property -> {
|
||||
entity.getOrderedProperties().forEach(property -> {
|
||||
switch (property.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
String propertyName = property.getPropertyName();
|
||||
Object value = map.get(propertyName);
|
||||
Getter<Object> getter =
|
||||
new Getter<Object>() {
|
||||
Getter<Object> getter = new Getter<Object>() {
|
||||
@Override
|
||||
public Object get() {
|
||||
throw new DslPropertyException(
|
||||
new HelenusPropertyNode(property, Optional.empty()));
|
||||
throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
|
||||
}
|
||||
};
|
||||
update.where(getter, eq(value));
|
||||
|
@ -655,8 +576,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
|
||||
public <T> InsertOperation<T> insert(T pojo) {
|
||||
Objects.requireNonNull(
|
||||
pojo,
|
||||
Objects.requireNonNull(pojo,
|
||||
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
|
||||
HelenusEntity entity = null;
|
||||
try {
|
||||
|
@ -664,23 +584,23 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
} catch (HelenusMappingException e) {
|
||||
}
|
||||
if (entity != null) {
|
||||
return new InsertOperation<T>(this, entity, entity.getMappingInterface(), true);
|
||||
return new InsertOperation<T>(this, entity.getMappingInterface(), true);
|
||||
} else {
|
||||
return this.<T>insert(pojo, null, null);
|
||||
return this.<T>insert(pojo, null);
|
||||
}
|
||||
}
|
||||
|
||||
public <T> InsertOperation<T> insert(Drafted draft) {
|
||||
return insert(draft.build(), draft.mutated(), draft.read());
|
||||
return insert(draft.build(), draft.mutated());
|
||||
}
|
||||
|
||||
private <T> InsertOperation<T> insert(T pojo, Set<String> mutations, Set<String> read) {
|
||||
private <T> InsertOperation<T> insert(T pojo, Set<String> mutations) {
|
||||
Objects.requireNonNull(pojo, "pojo is empty");
|
||||
|
||||
Class<?> iface = MappingUtil.getMappingInterface(pojo);
|
||||
HelenusEntity entity = Helenus.entity(iface);
|
||||
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, read, true);
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, true);
|
||||
}
|
||||
|
||||
public InsertOperation<ResultSet> upsert() {
|
||||
|
@ -692,12 +612,11 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
|
||||
public <T> InsertOperation<T> upsert(Drafted draft) {
|
||||
return this.<T>upsert((T) draft.build(), draft.mutated(), draft.read());
|
||||
return this.<T>upsert((T) draft.build(), draft.mutated());
|
||||
}
|
||||
|
||||
public <T> InsertOperation<T> upsert(T pojo) {
|
||||
Objects.requireNonNull(
|
||||
pojo,
|
||||
Objects.requireNonNull(pojo,
|
||||
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
|
||||
HelenusEntity entity = null;
|
||||
try {
|
||||
|
@ -705,19 +624,19 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
} catch (HelenusMappingException e) {
|
||||
}
|
||||
if (entity != null) {
|
||||
return new InsertOperation<T>(this, entity, entity.getMappingInterface(), false);
|
||||
return new InsertOperation<T>(this, entity.getMappingInterface(), false);
|
||||
} else {
|
||||
return this.<T>upsert(pojo, null, null);
|
||||
return this.<T>upsert(pojo, null);
|
||||
}
|
||||
}
|
||||
|
||||
private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations, Set<String> read) {
|
||||
private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations) {
|
||||
Objects.requireNonNull(pojo, "pojo is empty");
|
||||
|
||||
Class<?> iface = MappingUtil.getMappingInterface(pojo);
|
||||
HelenusEntity entity = Helenus.entity(iface);
|
||||
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, read, false);
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, false);
|
||||
}
|
||||
|
||||
public DeleteOperation delete() {
|
||||
|
@ -738,9 +657,6 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
}
|
||||
|
||||
public void close() {
|
||||
if (session == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (session.isClosed()) {
|
||||
return;
|
||||
|
@ -770,16 +686,13 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
|
|||
private void dropEntity(HelenusEntity entity) {
|
||||
|
||||
switch (entity.getType()) {
|
||||
case TABLE:
|
||||
execute(SchemaUtil.dropTable(entity));
|
||||
case TABLE :
|
||||
execute(SchemaUtil.dropTable(entity), true);
|
||||
break;
|
||||
|
||||
case UDT:
|
||||
execute(SchemaUtil.dropUserType(entity));
|
||||
case UDT :
|
||||
execute(SchemaUtil.dropUserType(entity), true);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new HelenusException("Unknown entity type.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,7 +16,9 @@
|
|||
package net.helenus.core;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import javax.validation.ConstraintValidator;
|
||||
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.support.HelenusException;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
@ -36,8 +37,7 @@ public enum HelenusValidator implements PropertyValueValidator {
|
|||
try {
|
||||
valid = typeless.isValid(value, null);
|
||||
} catch (ClassCastException e) {
|
||||
throw new HelenusMappingException(
|
||||
"validator was used for wrong type '" + value + "' in " + prop, e);
|
||||
throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
|
||||
}
|
||||
|
||||
if (!valid) {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,8 +15,10 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.datastax.driver.core.Row;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.Row;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.value.ColumnValueProvider;
|
||||
|
@ -25,7 +26,8 @@ import net.helenus.support.Fun;
|
|||
|
||||
public final class Mappers {
|
||||
|
||||
private Mappers() {}
|
||||
private Mappers() {
|
||||
}
|
||||
|
||||
public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {
|
||||
|
||||
|
@ -57,8 +59,7 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple2<A, B> apply(Row row) {
|
||||
return new Fun.Tuple2<A, B>(
|
||||
provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
|
||||
return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -69,10 +70,7 @@ public final class Mappers {
|
|||
private final HelenusProperty p2;
|
||||
private final HelenusProperty p3;
|
||||
|
||||
public Mapper3(
|
||||
ColumnValueProvider provider,
|
||||
HelenusPropertyNode p1,
|
||||
HelenusPropertyNode p2,
|
||||
public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3) {
|
||||
this.provider = provider;
|
||||
this.p1 = p1.getProperty();
|
||||
|
@ -82,9 +80,7 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple3<A, B, C> apply(Row row) {
|
||||
return new Fun.Tuple3<A, B, C>(
|
||||
provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2),
|
||||
return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3));
|
||||
}
|
||||
}
|
||||
|
@ -97,12 +93,8 @@ public final class Mappers {
|
|||
private final HelenusProperty p3;
|
||||
private final HelenusProperty p4;
|
||||
|
||||
public Mapper4(
|
||||
ColumnValueProvider provider,
|
||||
HelenusPropertyNode p1,
|
||||
HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3,
|
||||
HelenusPropertyNode p4) {
|
||||
public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3, HelenusPropertyNode p4) {
|
||||
this.provider = provider;
|
||||
this.p1 = p1.getProperty();
|
||||
this.p2 = p2.getProperty();
|
||||
|
@ -112,27 +104,18 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple4<A, B, C, D> apply(Row row) {
|
||||
return new Fun.Tuple4<A, B, C, D>(
|
||||
provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4));
|
||||
return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
|
||||
}
|
||||
}
|
||||
|
||||
public static final class Mapper5<A, B, C, D, E>
|
||||
implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
|
||||
public static final class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
|
||||
|
||||
private final ColumnValueProvider provider;
|
||||
private final HelenusProperty p1, p2, p3, p4, p5;
|
||||
|
||||
public Mapper5(
|
||||
ColumnValueProvider provider,
|
||||
HelenusPropertyNode p1,
|
||||
HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3,
|
||||
HelenusPropertyNode p4,
|
||||
HelenusPropertyNode p5) {
|
||||
public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
|
||||
this.provider = provider;
|
||||
this.p1 = p1.getProperty();
|
||||
this.p2 = p2.getProperty();
|
||||
|
@ -143,29 +126,19 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
|
||||
return new Fun.Tuple5<A, B, C, D, E>(
|
||||
provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4),
|
||||
provider.getColumnValue(row, 4, p5));
|
||||
return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
|
||||
}
|
||||
}
|
||||
|
||||
public static final class Mapper6<A, B, C, D, E, F>
|
||||
implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
|
||||
public static final class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
|
||||
|
||||
private final ColumnValueProvider provider;
|
||||
private final HelenusProperty p1, p2, p3, p4, p5, p6;
|
||||
|
||||
public Mapper6(
|
||||
ColumnValueProvider provider,
|
||||
HelenusPropertyNode p1,
|
||||
HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3,
|
||||
HelenusPropertyNode p4,
|
||||
HelenusPropertyNode p5,
|
||||
HelenusPropertyNode p6) {
|
||||
public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
|
||||
this.provider = provider;
|
||||
this.p1 = p1.getProperty();
|
||||
this.p2 = p2.getProperty();
|
||||
|
@ -177,30 +150,20 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
|
||||
return new Fun.Tuple6<A, B, C, D, E, F>(
|
||||
provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4),
|
||||
provider.getColumnValue(row, 4, p5),
|
||||
return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
|
||||
provider.getColumnValue(row, 5, p6));
|
||||
}
|
||||
}
|
||||
|
||||
public static final class Mapper7<A, B, C, D, E, F, G>
|
||||
implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
|
||||
public static final class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
|
||||
|
||||
private final ColumnValueProvider provider;
|
||||
private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;
|
||||
|
||||
public Mapper7(
|
||||
ColumnValueProvider provider,
|
||||
HelenusPropertyNode p1,
|
||||
HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3,
|
||||
HelenusPropertyNode p4,
|
||||
HelenusPropertyNode p5,
|
||||
HelenusPropertyNode p6,
|
||||
public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
|
||||
HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
|
||||
HelenusPropertyNode p7) {
|
||||
this.provider = provider;
|
||||
this.p1 = p1.getProperty();
|
||||
|
@ -214,14 +177,10 @@ public final class Mappers {
|
|||
|
||||
@Override
|
||||
public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
|
||||
return new Fun.Tuple7<A, B, C, D, E, F, G>(
|
||||
provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2),
|
||||
provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4),
|
||||
provider.getColumnValue(row, 4, p5),
|
||||
provider.getColumnValue(row, 5, p6),
|
||||
provider.getColumnValue(row, 6, p7));
|
||||
return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
|
||||
provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
|
||||
provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
|
||||
provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
package net.helenus.core;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
import com.datastax.driver.core.querybuilder.Ordering;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import java.util.Objects;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.ColumnType;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
|
@ -32,10 +34,10 @@ public final class Ordered {
|
|||
}
|
||||
|
||||
switch (direction) {
|
||||
case ASC:
|
||||
case ASC :
|
||||
return QueryBuilder.asc(propNode.getColumnName());
|
||||
|
||||
case DESC:
|
||||
case DESC :
|
||||
return QueryBuilder.desc(propNode.getColumnName());
|
||||
}
|
||||
|
||||
|
|
|
@ -2,74 +2,25 @@ package net.helenus.core;
|
|||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import net.helenus.support.CheckedRunnable;
|
||||
|
||||
public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {
|
||||
public static final PostCommitFunction<Void, Void> NULL_ABORT = new PostCommitFunction<Void, Void>(null, null, null, false);
|
||||
public static final PostCommitFunction<Void, Void> NULL_COMMIT = new PostCommitFunction<Void, Void>(null, null, null, true);
|
||||
|
||||
private final List<CheckedRunnable> commitThunks;
|
||||
private final List<CheckedRunnable> abortThunks;
|
||||
private Consumer<? super Throwable> exceptionallyThunk;
|
||||
private boolean committed;
|
||||
private final UnitOfWork uow;
|
||||
private final List<CommitThunk> postCommit;
|
||||
|
||||
PostCommitFunction(List<CheckedRunnable> postCommit, List<CheckedRunnable> abortThunks,
|
||||
Consumer<? super Throwable> exceptionallyThunk,
|
||||
boolean committed) {
|
||||
this.commitThunks = postCommit;
|
||||
this.abortThunks = abortThunks;
|
||||
this.exceptionallyThunk = exceptionallyThunk;
|
||||
this.committed = committed;
|
||||
PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) {
|
||||
this.uow = uow;
|
||||
this.postCommit = postCommit;
|
||||
}
|
||||
|
||||
private void apply(CheckedRunnable... fns) {
|
||||
try {
|
||||
for (CheckedRunnable fn : fns) {
|
||||
fn.run();
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
if (exceptionallyThunk != null) {
|
||||
exceptionallyThunk.accept(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public PostCommitFunction<T, R> andThen(CheckedRunnable... after) {
|
||||
public void andThen(CommitThunk after) {
|
||||
Objects.requireNonNull(after);
|
||||
if (commitThunks == null) {
|
||||
if (committed) {
|
||||
apply(after);
|
||||
}
|
||||
if (postCommit == null) {
|
||||
after.apply();
|
||||
} else {
|
||||
for (CheckedRunnable fn : after) {
|
||||
commitThunks.add(fn);
|
||||
postCommit.add(after);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public PostCommitFunction<T, R> orElse(CheckedRunnable... after) {
|
||||
Objects.requireNonNull(after);
|
||||
if (abortThunks == null) {
|
||||
if (!committed) {
|
||||
apply(after);
|
||||
}
|
||||
} else {
|
||||
for (CheckedRunnable fn : after) {
|
||||
abortThunks.add(fn);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public PostCommitFunction<T, R> exceptionally(Consumer<? super Throwable> fn) {
|
||||
Objects.requireNonNull(fn);
|
||||
exceptionallyThunk = fn;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public R apply(T t) {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,7 @@ package net.helenus.core;
|
|||
|
||||
import com.datastax.driver.core.querybuilder.Clause;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.value.ColumnValuePreparer;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
@ -39,34 +39,34 @@ public final class Postulate<V> {
|
|||
public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {
|
||||
|
||||
switch (operator) {
|
||||
case EQ:
|
||||
return QueryBuilder.eq(
|
||||
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
case EQ :
|
||||
return QueryBuilder.eq(node.getColumnName(),
|
||||
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
|
||||
case IN:
|
||||
case IN :
|
||||
Object[] preparedValues = new Object[values.length];
|
||||
for (int i = 0; i != values.length; ++i) {
|
||||
preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
|
||||
}
|
||||
return QueryBuilder.in(node.getColumnName(), preparedValues);
|
||||
|
||||
case LT:
|
||||
return QueryBuilder.lt(
|
||||
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
case LT :
|
||||
return QueryBuilder.lt(node.getColumnName(),
|
||||
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
|
||||
case LTE:
|
||||
return QueryBuilder.lte(
|
||||
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
case LTE :
|
||||
return QueryBuilder.lte(node.getColumnName(),
|
||||
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
|
||||
case GT:
|
||||
return QueryBuilder.gt(
|
||||
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
case GT :
|
||||
return QueryBuilder.gt(node.getColumnName(),
|
||||
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
|
||||
case GTE:
|
||||
return QueryBuilder.gte(
|
||||
node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
case GTE :
|
||||
return QueryBuilder.gte(node.getColumnName(),
|
||||
valuePreparer.prepareColumnValue(values[0], node.getProperty()));
|
||||
|
||||
default:
|
||||
default :
|
||||
throw new HelenusMappingException("unknown filter operation " + operator);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,17 +15,20 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.datastax.driver.core.querybuilder.BindMarker;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import com.datastax.driver.core.querybuilder.BindMarker;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
|
||||
import net.helenus.mapping.OrderingDirection;
|
||||
|
||||
/** Sugar methods for the queries */
|
||||
public final class Query {
|
||||
|
||||
private Query() {}
|
||||
private Query() {
|
||||
}
|
||||
|
||||
public static BindMarker marker() {
|
||||
return QueryBuilder.bindMarker();
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,14 +15,16 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.datastax.driver.core.*;
|
||||
import com.datastax.driver.core.querybuilder.IsNotNullClause;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.datastax.driver.core.querybuilder.Select;
|
||||
import com.datastax.driver.core.schemabuilder.*;
|
||||
import com.datastax.driver.core.schemabuilder.Create.Options;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.*;
|
||||
import net.helenus.mapping.ColumnType;
|
||||
|
@ -34,7 +35,8 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class SchemaUtil {
|
||||
|
||||
private SchemaUtil() {}
|
||||
private SchemaUtil() {
|
||||
}
|
||||
|
||||
public static RegularStatement use(String keyspace, boolean forceQuote) {
|
||||
if (forceQuote) {
|
||||
|
@ -57,31 +59,23 @@ public final class SchemaUtil {
|
|||
ColumnType columnType = prop.getColumnType();
|
||||
|
||||
if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
|
||||
throw new HelenusMappingException(
|
||||
"primary key columns are not supported in UserDefinedType for "
|
||||
+ prop.getPropertyName()
|
||||
+ " in entity "
|
||||
+ entity);
|
||||
throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
|
||||
+ prop.getPropertyName() + " in entity " + entity);
|
||||
}
|
||||
|
||||
try {
|
||||
prop.getDataType().addColumn(create, prop.getColumnName());
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new HelenusMappingException(
|
||||
"invalid column name '"
|
||||
+ prop.getColumnName()
|
||||
+ "' in entity '"
|
||||
+ entity.getName().getName()
|
||||
+ "'",
|
||||
e);
|
||||
throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
|
||||
+ entity.getName().getName() + "'", e);
|
||||
}
|
||||
}
|
||||
|
||||
return create;
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterUserType(
|
||||
UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {
|
||||
public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
|
||||
boolean dropUnusedColumns) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.UDT) {
|
||||
throw new HelenusMappingException("expected UDT entity " + entity);
|
||||
|
@ -90,13 +84,12 @@ public final class SchemaUtil {
|
|||
List<SchemaStatement> result = new ArrayList<SchemaStatement>();
|
||||
|
||||
/**
|
||||
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
|
||||
* exist
|
||||
* TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
|
||||
* when it will exist
|
||||
*/
|
||||
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
|
@ -113,9 +106,8 @@ public final class SchemaUtil {
|
|||
}
|
||||
|
||||
DataType dataType = userType.getFieldType(columnName);
|
||||
SchemaStatement stmt =
|
||||
prop.getDataType()
|
||||
.alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));
|
||||
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
|
||||
optional(columnName, dataType));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
|
@ -155,44 +147,32 @@ public final class SchemaUtil {
|
|||
for (HelenusProperty prop : properties) {
|
||||
String columnName = prop.getColumnName().toCql();
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case PARTITION_KEY :
|
||||
p.add(columnName);
|
||||
break;
|
||||
case CLUSTERING_COLUMN:
|
||||
case CLUSTERING_COLUMN :
|
||||
c.add(columnName);
|
||||
break;
|
||||
default:
|
||||
default :
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (p.size() == 0 && c.size() == 0)
|
||||
return "{"
|
||||
+ properties
|
||||
.stream()
|
||||
.map(HelenusProperty::getPropertyName)
|
||||
.collect(Collectors.joining(", "))
|
||||
+ "}";
|
||||
|
||||
return "("
|
||||
+ ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
|
||||
+ ((c.size() > 0)
|
||||
? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
|
||||
: "")
|
||||
+ ")";
|
||||
return "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
|
||||
+ ((c.size() > 0) ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0)) : "") + ")";
|
||||
}
|
||||
|
||||
public static SchemaStatement createMaterializedView(
|
||||
String keyspace, String viewName, HelenusEntity entity) {
|
||||
public static SchemaStatement createMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
|
||||
if (entity.getType() != HelenusEntityType.VIEW) {
|
||||
throw new HelenusMappingException("expected view entity " + entity);
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("no entity or table to select data");
|
||||
}
|
||||
|
||||
List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> props.add(p));
|
||||
|
||||
Select.Selection selection = QueryBuilder.select();
|
||||
|
@ -209,20 +189,20 @@ public final class SchemaUtil {
|
|||
for (HelenusPropertyNode prop : props) {
|
||||
String columnName = prop.getColumnName();
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case PARTITION_KEY :
|
||||
where = where.and(new IsNotNullClause(columnName));
|
||||
break;
|
||||
|
||||
case CLUSTERING_COLUMN:
|
||||
case CLUSTERING_COLUMN :
|
||||
where = where.and(new IsNotNullClause(columnName));
|
||||
|
||||
ClusteringColumn clusteringColumn =
|
||||
prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
|
||||
ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod()
|
||||
.getAnnotation(ClusteringColumn.class);
|
||||
if (clusteringColumn != null && clusteringColumn.ordering() != null) {
|
||||
o.add(columnName + " " + clusteringColumn.ordering().cql());
|
||||
}
|
||||
break;
|
||||
default:
|
||||
default :
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -233,12 +213,10 @@ public final class SchemaUtil {
|
|||
if (o.size() > 0) {
|
||||
clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
|
||||
}
|
||||
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering)
|
||||
.ifNotExists();
|
||||
return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering).ifNotExists();
|
||||
}
|
||||
|
||||
public static SchemaStatement dropMaterializedView(
|
||||
String keyspace, String viewName, HelenusEntity entity) {
|
||||
public static SchemaStatement dropMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
|
||||
return new DropMaterializedView(keyspace, viewName);
|
||||
}
|
||||
|
||||
|
@ -271,15 +249,14 @@ public final class SchemaUtil {
|
|||
|
||||
if (!clusteringColumns.isEmpty()) {
|
||||
Options options = create.withOptions();
|
||||
clusteringColumns.forEach(
|
||||
p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
|
||||
clusteringColumns
|
||||
.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
|
||||
}
|
||||
|
||||
return create;
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterTable(
|
||||
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
|
||||
public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
|
||||
|
||||
if (entity.getType() != HelenusEntityType.TABLE) {
|
||||
throw new HelenusMappingException("expected table entity " + entity);
|
||||
|
@ -289,8 +266,7 @@ public final class SchemaUtil {
|
|||
|
||||
Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
|
||||
|
@ -307,8 +283,8 @@ public final class SchemaUtil {
|
|||
}
|
||||
|
||||
ColumnMetadata columnMetadata = tmd.getColumn(columnName);
|
||||
SchemaStatement stmt =
|
||||
prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));
|
||||
SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
|
||||
optional(columnMetadata));
|
||||
|
||||
if (stmt != null) {
|
||||
result.add(stmt);
|
||||
|
@ -338,42 +314,28 @@ public final class SchemaUtil {
|
|||
|
||||
public static SchemaStatement createIndex(HelenusProperty prop) {
|
||||
if (prop.caseSensitiveIndex()) {
|
||||
return SchemaBuilder.createIndex(indexName(prop))
|
||||
.ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql())
|
||||
.andColumn(prop.getColumnName().toCql());
|
||||
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()).ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
|
||||
} else {
|
||||
return new CreateSasiIndex(prop.getIndexName().get().toCql())
|
||||
.ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql())
|
||||
.andColumn(prop.getColumnName().toCql());
|
||||
return new CreateSasiIndex(prop.getIndexName().get().toCql()).ifNotExists()
|
||||
.onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
|
||||
}
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
|
||||
|
||||
return entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.filter(p -> p.getIndexName().isPresent())
|
||||
.map(p -> SchemaUtil.createIndex(p))
|
||||
.collect(Collectors.toList());
|
||||
return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
|
||||
.map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public static List<SchemaStatement> alterIndexes(
|
||||
TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {
|
||||
public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
|
||||
boolean dropUnusedIndexes) {
|
||||
|
||||
List<SchemaStatement> list = new ArrayList<SchemaStatement>();
|
||||
|
||||
final Set<String> visitedColumns =
|
||||
dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.filter(p -> p.getIndexName().isPresent())
|
||||
.forEach(
|
||||
p -> {
|
||||
entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {
|
||||
String columnName = p.getColumnName().getName();
|
||||
|
||||
if (dropUnusedIndexes) {
|
||||
|
@ -394,11 +356,9 @@ public final class SchemaUtil {
|
|||
|
||||
if (dropUnusedIndexes) {
|
||||
|
||||
tmd.getColumns()
|
||||
.stream()
|
||||
tmd.getColumns().stream()
|
||||
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
|
||||
.forEach(
|
||||
c -> {
|
||||
.forEach(c -> {
|
||||
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
|
||||
});
|
||||
}
|
||||
|
@ -407,14 +367,14 @@ public final class SchemaUtil {
|
|||
}
|
||||
|
||||
public static SchemaStatement dropIndex(HelenusProperty prop) {
|
||||
return SchemaBuilder.dropIndex(indexName(prop)).ifExists();
|
||||
return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
|
||||
}
|
||||
|
||||
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
|
||||
switch (o) {
|
||||
case ASC:
|
||||
case ASC :
|
||||
return SchemaBuilder.Direction.ASC;
|
||||
case DESC:
|
||||
case DESC :
|
||||
return SchemaBuilder.Direction.DESC;
|
||||
}
|
||||
throw new HelenusMappingException("unknown ordering " + o);
|
||||
|
@ -424,10 +384,7 @@ public final class SchemaUtil {
|
|||
|
||||
throw new HelenusMappingException(
|
||||
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
|
||||
+ prop.getPropertyName()
|
||||
+ "' type is '"
|
||||
+ prop.getJavaType()
|
||||
+ "' in the entity "
|
||||
+ prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
|
||||
+ prop.getEntity());
|
||||
}
|
||||
|
||||
|
@ -466,9 +423,4 @@ public final class SchemaUtil {
|
|||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static String indexName(HelenusProperty prop) {
|
||||
return prop.getEntity().getName().toCql() + "_" + prop.getIndexName().get().toCql();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,16 +15,18 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.function.Consumer;
|
||||
import javax.cache.CacheManager;
|
||||
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
|
||||
import brave.Tracer;
|
||||
import net.helenus.core.reflect.DslExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusEntityType;
|
||||
|
@ -43,26 +44,18 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
private CodecRegistry registry;
|
||||
private String usingKeyspace;
|
||||
private boolean showCql = false;
|
||||
private boolean showValues = true;
|
||||
private ConsistencyLevel consistencyLevel;
|
||||
private boolean idempotent = false;
|
||||
private boolean idempotent = true;
|
||||
private MetricRegistry metricRegistry = new MetricRegistry();
|
||||
private Tracer zipkinTracer;
|
||||
private PrintStream printStream = System.out;
|
||||
private Executor executor = MoreExecutors.directExecutor();
|
||||
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
|
||||
private SessionRepositoryBuilder sessionRepository;
|
||||
private boolean dropUnusedColumns = false;
|
||||
private boolean dropUnusedIndexes = false;
|
||||
private KeyspaceMetadata keyspaceMetadata;
|
||||
private AutoDdl autoDdl = AutoDdl.UPDATE;
|
||||
private CacheManager cacheManager = null;
|
||||
|
||||
SessionInitializer(Session session, String keyspace) {
|
||||
this.session = session;
|
||||
this.usingKeyspace = keyspace;
|
||||
if (session != null) {
|
||||
this.sessionRepository = new SessionRepositoryBuilder(session);
|
||||
}
|
||||
}
|
||||
|
||||
SessionInitializer(Session session) {
|
||||
this.session = Objects.requireNonNull(session, "empty session");
|
||||
|
@ -110,44 +103,30 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer showQueryValuesInLog(boolean showValues) {
|
||||
this.showValues = showValues;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer showQueryValuesInLog() {
|
||||
this.showValues = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean showValues() {
|
||||
return showValues;
|
||||
}
|
||||
|
||||
public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
|
||||
this.metricRegistry = metricRegistry;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer zipkinTracer(Tracer tracer) {
|
||||
this.zipkinTracer = tracer;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
|
||||
this.unitOfWorkClass = e;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
|
||||
this.consistencyLevel = consistencyLevel;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer setCacheManager(CacheManager cacheManager) {
|
||||
this.cacheManager = cacheManager;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ConsistencyLevel getDefaultConsistencyLevel() {
|
||||
return consistencyLevel;
|
||||
}
|
||||
|
||||
public SessionInitializer setOperationsIdempotentByDefault() {
|
||||
this.idempotent = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SessionInitializer idempotentQueryExecution(boolean idempotent) {
|
||||
this.idempotent = idempotent;
|
||||
return this;
|
||||
|
@ -200,11 +179,8 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
|
||||
public SessionInitializer addPackage(String packageName) {
|
||||
try {
|
||||
PackageUtil.getClasses(packageName)
|
||||
.stream()
|
||||
.filter(c -> c.isInterface() && !c.isAnnotation())
|
||||
.forEach(
|
||||
clazz -> {
|
||||
PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
|
||||
.forEach(clazz -> {
|
||||
initList.add(Either.right(clazz));
|
||||
});
|
||||
} catch (IOException | ClassNotFoundException e) {
|
||||
|
@ -249,10 +225,8 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
}
|
||||
|
||||
public SessionInitializer use(String keyspace) {
|
||||
if (session != null) {
|
||||
session.execute(SchemaUtil.use(keyspace, false));
|
||||
this.usingKeyspace = keyspace;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -268,28 +242,16 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
|
||||
public synchronized HelenusSession get() {
|
||||
initialize();
|
||||
return new HelenusSession(
|
||||
session,
|
||||
usingKeyspace,
|
||||
registry,
|
||||
showCql,
|
||||
showValues,
|
||||
printStream,
|
||||
sessionRepository,
|
||||
executor,
|
||||
autoDdl == AutoDdl.CREATE_DROP,
|
||||
consistencyLevel,
|
||||
idempotent,
|
||||
cacheManager,
|
||||
metricRegistry);
|
||||
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
|
||||
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
|
||||
zipkinTracer);
|
||||
}
|
||||
|
||||
private void initialize() {
|
||||
|
||||
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
|
||||
|
||||
initList.forEach(
|
||||
(either) -> {
|
||||
initList.forEach((either) -> {
|
||||
Class<?> iface = null;
|
||||
if (either.isLeft()) {
|
||||
iface = MappingUtil.getMappingInterface(either.getLeft());
|
||||
|
@ -298,91 +260,61 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
}
|
||||
|
||||
DslExportable dsl = (DslExportable) Helenus.dsl(iface);
|
||||
if (session != null) {
|
||||
dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
|
||||
}
|
||||
if (sessionRepository != null) {
|
||||
sessionRepository.add(dsl);
|
||||
}
|
||||
});
|
||||
|
||||
if (session == null) return;
|
||||
|
||||
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
|
||||
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
|
||||
|
||||
switch (autoDdl) {
|
||||
case CREATE_DROP:
|
||||
case CREATE_DROP :
|
||||
|
||||
// Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
|
||||
// referenced
|
||||
// by a view.
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
// Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
|
||||
// the type is
|
||||
// still referenced by a table.
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.dropTable(e));
|
||||
|
||||
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
|
||||
|
||||
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
|
||||
// intentional!)
|
||||
case CREATE:
|
||||
case CREATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.createTable(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
|
||||
break;
|
||||
|
||||
case VALIDATE:
|
||||
case VALIDATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
|
||||
|
||||
break;
|
||||
|
||||
case UPDATE:
|
||||
case UPDATE :
|
||||
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.dropView(e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
|
||||
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
|
||||
.forEach(e -> tableOps.createView(e));
|
||||
break;
|
||||
}
|
||||
|
@ -394,41 +326,27 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
}
|
||||
}
|
||||
|
||||
private void eachUserTypeInOrder(
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
|
||||
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
|
||||
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
|
||||
|
||||
sessionRepository
|
||||
.entities()
|
||||
.stream()
|
||||
.filter(e -> e.getType() == HelenusEntityType.UDT)
|
||||
.forEach(
|
||||
e -> {
|
||||
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
|
||||
stack.clear();
|
||||
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInReverseOrder(
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
|
||||
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
|
||||
deque
|
||||
.stream()
|
||||
.forEach(
|
||||
e -> {
|
||||
deque.stream().forEach(e -> {
|
||||
action.accept(e);
|
||||
});
|
||||
}
|
||||
|
||||
private void eachUserTypeInRecursion(
|
||||
HelenusEntity e,
|
||||
Set<HelenusEntity> processedSet,
|
||||
Set<HelenusEntity> stack,
|
||||
UserTypeOperations userTypeOps,
|
||||
Consumer<? super HelenusEntity> action) {
|
||||
private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
|
||||
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
|
||||
|
||||
stack.add(e);
|
||||
|
||||
|
@ -449,8 +367,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
|
|||
|
||||
private KeyspaceMetadata getKeyspaceMetadata() {
|
||||
if (keyspaceMetadata == null) {
|
||||
keyspaceMetadata =
|
||||
session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
|
||||
keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
|
||||
}
|
||||
return keyspaceMetadata;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,11 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import com.datastax.driver.core.UserType;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import java.util.Collection;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
|
||||
public final class SessionRepository {
|
||||
|
@ -31,8 +32,7 @@ public final class SessionRepository {
|
|||
|
||||
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
|
||||
|
||||
entityMap =
|
||||
ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
|
||||
entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
|
||||
}
|
||||
|
||||
public UserType findUserType(String name) {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,17 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.Session;
|
||||
import com.datastax.driver.core.UDTValue;
|
||||
import com.datastax.driver.core.UserType;
|
||||
import com.google.common.collect.HashMultimap;
|
||||
import com.google.common.collect.Multimap;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusEntityType;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
@ -34,8 +35,7 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class SessionRepositoryBuilder {
|
||||
|
||||
private static final Optional<HelenusEntityType> OPTIONAL_UDT =
|
||||
Optional.of(HelenusEntityType.UDT);
|
||||
private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);
|
||||
|
||||
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
|
||||
|
||||
|
@ -99,8 +99,7 @@ public final class SessionRepositoryBuilder {
|
|||
entity = helenusEntity;
|
||||
|
||||
if (type.isPresent() && entity.getType() != type.get()) {
|
||||
throw new HelenusMappingException(
|
||||
"unexpected entity type " + entity.getType() + " for " + entity);
|
||||
throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
|
||||
}
|
||||
|
||||
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,11 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.datastax.driver.core.TableMetadata;
|
||||
import com.datastax.driver.core.schemabuilder.SchemaStatement;
|
||||
import java.util.List;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
|
@ -28,30 +29,26 @@ public final class TableOperations {
|
|||
private final boolean dropUnusedColumns;
|
||||
private final boolean dropUnusedIndexes;
|
||||
|
||||
public TableOperations(
|
||||
AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
|
||||
public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
|
||||
this.sessionOps = sessionOps;
|
||||
this.dropUnusedColumns = dropUnusedColumns;
|
||||
this.dropUnusedIndexes = dropUnusedIndexes;
|
||||
}
|
||||
|
||||
public void createTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.createTable(entity));
|
||||
sessionOps.execute(SchemaUtil.createTable(entity), true);
|
||||
executeBatch(SchemaUtil.createIndexes(entity));
|
||||
}
|
||||
|
||||
public void dropTable(HelenusEntity entity) {
|
||||
sessionOps.execute(SchemaUtil.dropTable(entity));
|
||||
sessionOps.execute(SchemaUtil.dropTable(entity), true);
|
||||
}
|
||||
|
||||
public void validateTable(TableMetadata tmd, HelenusEntity entity) {
|
||||
|
||||
if (tmd == null) {
|
||||
throw new HelenusException(
|
||||
"table does not exists "
|
||||
+ entity.getName()
|
||||
+ "for entity "
|
||||
+ entity.getMappingInterface());
|
||||
"table does not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
|
||||
}
|
||||
|
||||
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
|
||||
|
@ -60,10 +57,7 @@ public final class TableOperations {
|
|||
|
||||
if (!list.isEmpty()) {
|
||||
throw new HelenusException(
|
||||
"schema changed for entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ ", apply this command: "
|
||||
+ list);
|
||||
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -79,15 +73,14 @@ public final class TableOperations {
|
|||
|
||||
public void createView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.createMaterializedView(
|
||||
sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
|
||||
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10 does not yet support 2i on materialized views.
|
||||
SchemaUtil.createMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
|
||||
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
|
||||
// does not yet support 2i on materialized views.
|
||||
}
|
||||
|
||||
public void dropView(HelenusEntity entity) {
|
||||
sessionOps.execute(
|
||||
SchemaUtil.dropMaterializedView(
|
||||
sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
|
||||
SchemaUtil.dropMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
|
||||
}
|
||||
|
||||
public void updateView(TableMetadata tmd, HelenusEntity entity) {
|
||||
|
@ -102,6 +95,8 @@ public final class TableOperations {
|
|||
|
||||
private void executeBatch(List<SchemaStatement> list) {
|
||||
|
||||
list.forEach(s -> sessionOps.execute(s));
|
||||
list.forEach(s -> {
|
||||
sessionOps.execute(s, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,138 +15,14 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import static net.helenus.core.HelenusSession.deleted;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.collect.HashBasedTable;
|
||||
import com.google.common.collect.Table;
|
||||
import com.google.common.collect.TreeTraverser;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.cache.Cache;
|
||||
import javax.cache.CacheManager;
|
||||
import javax.cache.configuration.CacheEntryListenerConfiguration;
|
||||
import javax.cache.configuration.Configuration;
|
||||
import javax.cache.integration.CacheLoader;
|
||||
import javax.cache.integration.CacheLoaderException;
|
||||
import javax.cache.integration.CompletionListener;
|
||||
import javax.cache.processor.EntryProcessor;
|
||||
import javax.cache.processor.EntryProcessorException;
|
||||
import javax.cache.processor.EntryProcessorResult;
|
||||
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.MapCache;
|
||||
import net.helenus.core.operation.AbstractOperation;
|
||||
import net.helenus.core.operation.BatchOperation;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.support.CheckedRunnable;
|
||||
import net.helenus.support.Either;
|
||||
import net.helenus.support.HelenusException;
|
||||
import org.apache.commons.lang3.SerializationUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
|
||||
public class UnitOfWork implements AutoCloseable {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(UnitOfWork.class);
|
||||
|
||||
public final UnitOfWork parent;
|
||||
protected final List<UnitOfWork> nested = new ArrayList<>();
|
||||
protected final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
|
||||
protected final EvictTrackingMapCache<String, Object> statementCache;
|
||||
protected final HelenusSession session;
|
||||
protected String purpose;
|
||||
protected List<String> nestedPurposes = new ArrayList<String>();
|
||||
protected String info;
|
||||
protected int cacheHits = 0;
|
||||
protected int cacheMisses = 0;
|
||||
protected int databaseLookups = 0;
|
||||
protected final Stopwatch elapsedTime;
|
||||
protected Map<String, Double> databaseTime = new HashMap<>();
|
||||
protected double cacheLookupTimeMSecs = 0.0;
|
||||
private List<CheckedRunnable> commitThunks = new ArrayList<>();
|
||||
private List<CheckedRunnable> abortThunks = new ArrayList<>();
|
||||
private Consumer<? super Throwable> exceptionallyThunk;
|
||||
private List<CompletableFuture<?>> asyncOperationFutures = new ArrayList<CompletableFuture<?>>();
|
||||
private boolean aborted = false;
|
||||
private boolean committed = false;
|
||||
private long committedAt = 0L;
|
||||
private BatchOperation batch;
|
||||
|
||||
public UnitOfWork(HelenusSession session) {
|
||||
this(session, null);
|
||||
}
|
||||
|
||||
public UnitOfWork(HelenusSession session, UnitOfWork parent) {
|
||||
Objects.requireNonNull(session, "containing session cannot be null");
|
||||
|
||||
this.parent = parent;
|
||||
if (parent != null) {
|
||||
parent.addNestedUnitOfWork(this);
|
||||
}
|
||||
this.session = session;
|
||||
CacheLoader<String, Object> cacheLoader = null;
|
||||
if (parent != null) {
|
||||
cacheLoader =
|
||||
new CacheLoader<String, Object>() {
|
||||
|
||||
Cache<String, Object> cache = parent.getCache();
|
||||
|
||||
@Override
|
||||
public Object load(String key) throws CacheLoaderException {
|
||||
return cache.get(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> loadAll(Iterable<? extends String> keys)
|
||||
throws CacheLoaderException {
|
||||
Map<String, Object> kvp = new HashMap<String, Object>();
|
||||
for (String key : keys) {
|
||||
kvp.put(key, cache.get(key));
|
||||
}
|
||||
return kvp;
|
||||
}
|
||||
};
|
||||
}
|
||||
this.elapsedTime = Stopwatch.createUnstarted();
|
||||
this.statementCache = new EvictTrackingMapCache<String, Object>(null, "UOW(" + hashCode() + ")", cacheLoader, true);
|
||||
}
|
||||
|
||||
public void addDatabaseTime(String name, Stopwatch amount) {
|
||||
Double time = databaseTime.get(name);
|
||||
if (time == null) {
|
||||
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
|
||||
} else {
|
||||
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void addCacheLookupTime(Stopwatch amount) {
|
||||
cacheLookupTimeMSecs += amount.elapsed(TimeUnit.MICROSECONDS);
|
||||
}
|
||||
|
||||
public void addNestedUnitOfWork(UnitOfWork uow) {
|
||||
synchronized (nested) {
|
||||
nested.add(uow);
|
||||
}
|
||||
}
|
||||
public interface UnitOfWork<X extends Exception> extends AutoCloseable {
|
||||
|
||||
/**
|
||||
* Marks the beginning of a transactional section of work. Will write a
|
||||
|
@ -155,662 +30,43 @@ public class UnitOfWork implements AutoCloseable {
|
|||
*
|
||||
* @return the handle used to commit or abort the work.
|
||||
*/
|
||||
public synchronized UnitOfWork begin() {
|
||||
elapsedTime.start();
|
||||
// log.record(txn::start)
|
||||
return this;
|
||||
}
|
||||
UnitOfWork<X> begin();
|
||||
|
||||
public String getPurpose() {
|
||||
return purpose;
|
||||
}
|
||||
|
||||
public UnitOfWork setPurpose(String purpose) {
|
||||
this.purpose = purpose;
|
||||
return this;
|
||||
}
|
||||
|
||||
public void addFuture(CompletableFuture<?> future) {
|
||||
asyncOperationFutures.add(future);
|
||||
}
|
||||
|
||||
public void setInfo(String info) {
|
||||
this.info = info;
|
||||
}
|
||||
|
||||
public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
|
||||
if (cache > 0) {
|
||||
cacheHits += cache;
|
||||
} else {
|
||||
cacheMisses += Math.abs(cache);
|
||||
}
|
||||
if (ops > 0) {
|
||||
databaseLookups += ops;
|
||||
}
|
||||
}
|
||||
|
||||
public String logTimers(String what) {
|
||||
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
|
||||
double d = 0.0;
|
||||
double c = cacheLookupTimeMSecs / 1000.0;
|
||||
double fc = (c / e) * 100.0;
|
||||
String database = "";
|
||||
if (databaseTime.size() > 0) {
|
||||
List<String> dbt = new ArrayList<>(databaseTime.size());
|
||||
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
|
||||
double t = dt.getValue() / 1000.0;
|
||||
d += t;
|
||||
dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
|
||||
}
|
||||
double fd = (d / e) * 100.0;
|
||||
database =
|
||||
String.format(
|
||||
", %d quer%s (%,.3fms %,2.2f%% - %s)",
|
||||
databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
|
||||
}
|
||||
String cache = "";
|
||||
if (cacheLookupTimeMSecs > 0) {
|
||||
int cacheLookups = cacheHits + cacheMisses;
|
||||
cache =
|
||||
String.format(
|
||||
" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
|
||||
cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
|
||||
}
|
||||
String da = "";
|
||||
if (databaseTime.size() > 0 || cacheLookupTimeMSecs > 0) {
|
||||
double dat = d + c;
|
||||
double daf = (dat / e) * 100;
|
||||
da =
|
||||
String.format(
|
||||
" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
|
||||
}
|
||||
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
|
||||
String n =
|
||||
nested
|
||||
.stream()
|
||||
.map(uow -> String.valueOf(uow.hashCode()))
|
||||
.collect(Collectors.joining(", "));
|
||||
String s =
|
||||
String.format(
|
||||
Locale.US,
|
||||
"UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
|
||||
hashCode(),
|
||||
(nested.size() > 0 ? ", [" + n + "]" : ""),
|
||||
what,
|
||||
e,
|
||||
cache,
|
||||
database,
|
||||
da,
|
||||
(purpose == null ? "" : " " + purpose),
|
||||
(nestedPurposes.isEmpty()) ? "" : ", " + x,
|
||||
(info == null) ? "" : " " + info);
|
||||
return s;
|
||||
}
|
||||
|
||||
private void applyPostCommitFunctions(String what, List<CheckedRunnable> thunks, Consumer<? super Throwable> exceptionallyThunk) {
|
||||
if (!thunks.isEmpty()) {
|
||||
for (CheckedRunnable f : thunks) {
|
||||
try {
|
||||
f.run();
|
||||
} catch (Throwable t) {
|
||||
if (exceptionallyThunk != null) {
|
||||
exceptionallyThunk.accept(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Optional<Object> cacheLookup(List<Facet> facets) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Optional<Object> result = Optional.empty();
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
|
||||
if (eitherValue != null) {
|
||||
Object value = deleted;
|
||||
if (eitherValue.isLeft()) {
|
||||
value = eitherValue.getLeft();
|
||||
}
|
||||
return Optional.of(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
|
||||
result = checkParentCache(facets);
|
||||
if (result.isPresent()) {
|
||||
Object r = result.get();
|
||||
Class<?> iface = MappingUtil.getMappingInterface(r);
|
||||
if (Helenus.entity(iface).isDraftable()) {
|
||||
cacheUpdate(r, facets);
|
||||
} else {
|
||||
cacheUpdate(SerializationUtils.<Serializable>clone((Serializable) r), facets);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Optional<Object> checkParentCache(List<Facet> facets) {
|
||||
Optional<Object> result = Optional.empty();
|
||||
if (parent != null) {
|
||||
result = parent.checkParentCache(facets);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public List<Facet> cacheEvict(List<Facet> facets) {
|
||||
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
Optional<Object> optionalValue = cacheLookup(facets);
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
String columnKey = facet.name() + "==" + facet.value();
|
||||
// mark the value identified by the facet to `deleted`
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
}
|
||||
}
|
||||
|
||||
// Now, look for other row/col pairs that referenced the same object, mark them
|
||||
// `deleted` if the cache had a value before we added the deleted marker objects.
|
||||
if (optionalValue.isPresent()) {
|
||||
Object value = optionalValue.get();
|
||||
cache
|
||||
.columnKeySet()
|
||||
.forEach(
|
||||
columnKey -> {
|
||||
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
|
||||
if (eitherCachedValue.isLeft()) {
|
||||
Object cachedValue = eitherCachedValue.getLeft();
|
||||
if (cachedValue == value) {
|
||||
cache.put(tableName, columnKey, deletedObjectFacets);
|
||||
String[] parts = columnKey.split("==");
|
||||
facets.add(new Facet<String>(parts[0], parts[1]));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return facets;
|
||||
}
|
||||
|
||||
public Cache<String, Object> getCache() {
|
||||
return statementCache;
|
||||
}
|
||||
|
||||
public Object cacheUpdate(Object value, List<Facet> facets) {
|
||||
Object result = null;
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
for (Facet facet : facets) {
|
||||
if (!facet.fixed()) {
|
||||
if (facet.alone()) {
|
||||
String columnName = facet.name() + "==" + facet.value();
|
||||
if (result == null) result = cache.get(tableName, columnName);
|
||||
cache.put(tableName, columnName, Either.left(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void batch(AbstractOperation s) {
|
||||
if (batch == null) {
|
||||
batch = new BatchOperation(session);
|
||||
}
|
||||
batch.add(s);
|
||||
}
|
||||
|
||||
private Iterator<UnitOfWork> getChildNodes() {
|
||||
return nested.iterator();
|
||||
}
|
||||
void addNestedUnitOfWork(UnitOfWork<X> uow);
|
||||
|
||||
/**
|
||||
* Checks to see if the work performed between calling begin and now can be committed or not.
|
||||
* Checks to see if the work performed between calling begin and now can be
|
||||
* committed or not.
|
||||
*
|
||||
* @return a function from which to chain work that only happens when commit is successful
|
||||
* @throws HelenusException when the work overlaps with other concurrent writers.
|
||||
* @return a function from which to chain work that only happens when commit is
|
||||
* successful
|
||||
* @throws X
|
||||
* when the work overlaps with other concurrent writers.
|
||||
*/
|
||||
public synchronized PostCommitFunction<Void, Void> commit() throws HelenusException {
|
||||
|
||||
if (isDone()) {
|
||||
return PostCommitFunction.NULL_ABORT;
|
||||
}
|
||||
|
||||
// Only the outer-most UOW batches statements for commit time, execute them.
|
||||
if (batch != null) {
|
||||
committedAt = batch.sync(this); //TODO(gburd): update cache with writeTime...
|
||||
}
|
||||
|
||||
// All nested UnitOfWork should be committed (not aborted) before calls to
|
||||
// commit, check.
|
||||
boolean canCommit = true;
|
||||
TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
|
||||
for (UnitOfWork uow : traverser.postOrderTraversal(this)) {
|
||||
if (this != uow) {
|
||||
canCommit &= (!uow.aborted && uow.committed);
|
||||
}
|
||||
}
|
||||
|
||||
if (!canCommit) {
|
||||
|
||||
if (parent == null) {
|
||||
|
||||
// Apply all post-commit abort functions, this is the outer-most UnitOfWork.
|
||||
traverser
|
||||
.postOrderTraversal(this)
|
||||
.forEach(
|
||||
uow -> {
|
||||
applyPostCommitFunctions("aborted", abortThunks, exceptionallyThunk);
|
||||
});
|
||||
|
||||
elapsedTime.stop();
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(logTimers("aborted"));
|
||||
}
|
||||
}
|
||||
|
||||
return PostCommitFunction.NULL_ABORT;
|
||||
} else {
|
||||
committed = true;
|
||||
aborted = false;
|
||||
|
||||
if (parent == null) {
|
||||
|
||||
// Apply all post-commit commit functions, this is the outer-most UnitOfWork.
|
||||
traverser
|
||||
.postOrderTraversal(this)
|
||||
.forEach(
|
||||
uow -> {
|
||||
applyPostCommitFunctions("committed", uow.commitThunks, exceptionallyThunk);
|
||||
});
|
||||
|
||||
// Merge our statement cache into the session cache if it exists.
|
||||
CacheManager cacheManager = session.getCacheManager();
|
||||
if (cacheManager != null) {
|
||||
for (Map.Entry<String, Object> entry :
|
||||
(Set<Map.Entry<String, Object>>) statementCache.<Map>unwrap(Map.class).entrySet()) {
|
||||
String[] keyParts = entry.getKey().split("\\.");
|
||||
if (keyParts.length == 2) {
|
||||
String cacheName = keyParts[0];
|
||||
String key = keyParts[1];
|
||||
if (!StringUtils.isBlank(cacheName) && !StringUtils.isBlank(key)) {
|
||||
Cache<Object, Object> cache = cacheManager.getCache(cacheName);
|
||||
if (cache != null) {
|
||||
Object value = entry.getValue();
|
||||
if (value == deleted) {
|
||||
cache.remove(key);
|
||||
} else {
|
||||
cache.put(key.toString(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Merge our cache into the session cache.
|
||||
session.mergeCache(cache);
|
||||
|
||||
// Spoil any lingering futures that may be out there.
|
||||
asyncOperationFutures.forEach(
|
||||
f ->
|
||||
f.completeExceptionally(
|
||||
new HelenusException(
|
||||
"Futures must be resolved before their unit of work has committed/aborted.")));
|
||||
|
||||
elapsedTime.stop();
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(logTimers("committed"));
|
||||
}
|
||||
|
||||
return PostCommitFunction.NULL_COMMIT;
|
||||
} else {
|
||||
// Merge cache and statistics into parent if there is one.
|
||||
parent.statementCache.putAll(statementCache.<Map>unwrap(Map.class));
|
||||
parent.statementCache.removeAll(statementCache.getDeletions());
|
||||
parent.mergeCache(cache);
|
||||
parent.addBatched(batch);
|
||||
if (purpose != null) {
|
||||
parent.nestedPurposes.add(purpose);
|
||||
}
|
||||
parent.cacheHits += cacheHits;
|
||||
parent.cacheMisses += cacheMisses;
|
||||
parent.databaseLookups += databaseLookups;
|
||||
parent.cacheLookupTimeMSecs += cacheLookupTimeMSecs;
|
||||
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
|
||||
String name = dt.getKey();
|
||||
if (parent.databaseTime.containsKey(name)) {
|
||||
double t = parent.databaseTime.get(name);
|
||||
parent.databaseTime.put(name, t + dt.getValue());
|
||||
} else {
|
||||
parent.databaseTime.put(name, dt.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// TODO(gburd): hopefully we'll be able to detect conflicts here and so we'd want to...
|
||||
// else {
|
||||
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
|
||||
// T object = ctor.newInstance(new Object[] { String message });
|
||||
// }
|
||||
return new PostCommitFunction<Void, Void>(commitThunks, abortThunks, exceptionallyThunk, true);
|
||||
}
|
||||
|
||||
private void addBatched(BatchOperation batchArg) {
|
||||
if (batchArg != null) {
|
||||
if (this.batch == null) {
|
||||
this.batch = batchArg;
|
||||
} else {
|
||||
this.batch.addAll(batchArg);
|
||||
}
|
||||
}
|
||||
}
|
||||
PostCommitFunction<Void, Void> commit() throws X;
|
||||
|
||||
/**
|
||||
* Explicitly abort the work within this unit of work. Any nested aborted unit of work will
|
||||
* trigger the entire unit of work to commit.
|
||||
* Explicitly abort the work within this unit of work. Any nested aborted unit
|
||||
* of work will trigger the entire unit of work to commit.
|
||||
*/
|
||||
public synchronized void abort() {
|
||||
if (!aborted) {
|
||||
aborted = true;
|
||||
void abort();
|
||||
|
||||
// Spoil any pending futures created within the context of this unit of work.
|
||||
asyncOperationFutures.forEach(
|
||||
f ->
|
||||
f.completeExceptionally(
|
||||
new HelenusException(
|
||||
"Futures must be resolved before their unit of work has committed/aborted.")));
|
||||
boolean hasAborted();
|
||||
|
||||
TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
|
||||
traverser
|
||||
.postOrderTraversal(this)
|
||||
.forEach(
|
||||
uow -> {
|
||||
applyPostCommitFunctions("aborted", uow.abortThunks, exceptionallyThunk);
|
||||
uow.abortThunks.clear();
|
||||
});
|
||||
boolean hasCommitted();
|
||||
|
||||
if (parent == null) {
|
||||
if (elapsedTime.isRunning()) {
|
||||
elapsedTime.stop();
|
||||
}
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(logTimers("aborted"));
|
||||
}
|
||||
}
|
||||
Optional<Object> cacheLookup(List<Facet> facets);
|
||||
|
||||
// TODO(gburd): when we integrate the transaction support we'll need to...
|
||||
// log.record(txn::abort)
|
||||
// cache.invalidateSince(txn::start time)
|
||||
}
|
||||
}
|
||||
void cacheUpdate(Object pojo, List<Facet> facets);
|
||||
|
||||
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
|
||||
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
|
||||
from.rowMap()
|
||||
.forEach(
|
||||
(rowKey, columnMap) -> {
|
||||
columnMap.forEach(
|
||||
(columnKey, value) -> {
|
||||
if (to.contains(rowKey, columnKey)) {
|
||||
to.put(
|
||||
rowKey,
|
||||
columnKey,
|
||||
Either.left(
|
||||
CacheUtil.merge(
|
||||
to.get(rowKey, columnKey).getLeft(),
|
||||
from.get(rowKey, columnKey).getLeft())));
|
||||
} else {
|
||||
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
List<Facet> cacheEvict(List<Facet> facets);
|
||||
|
||||
public boolean isDone() {
|
||||
return aborted || committed;
|
||||
}
|
||||
UnitOfWork setPurpose(String purpose);
|
||||
|
||||
public String describeConflicts() {
|
||||
return "it's complex...";
|
||||
}
|
||||
void addDatabaseTime(String name, Stopwatch amount);
|
||||
void addCacheLookupTime(Stopwatch amount);
|
||||
|
||||
@Override
|
||||
public void close() throws HelenusException {
|
||||
// Closing a UnitOfWork will abort iff we've not already aborted or committed this unit of work.
|
||||
if (aborted == false && committed == false) {
|
||||
abort();
|
||||
}
|
||||
}
|
||||
// Cache > 0 means "cache hit", < 0 means cache miss.
|
||||
void recordCacheAndDatabaseOperationCount(int cache, int database);
|
||||
|
||||
public boolean hasAborted() {
|
||||
return aborted;
|
||||
}
|
||||
|
||||
public boolean hasCommitted() {
|
||||
return committed;
|
||||
}
|
||||
|
||||
public long committedAt() {
|
||||
return committedAt;
|
||||
}
|
||||
|
||||
private static class EvictTrackingMapCache<K, V> implements Cache<K, V> {
|
||||
private final Set<K> deletes;
|
||||
private final Cache<K, V> delegate;
|
||||
|
||||
public EvictTrackingMapCache(CacheManager manager, String name, CacheLoader<K, V> cacheLoader,
|
||||
boolean isReadThrough) {
|
||||
deletes = Collections.synchronizedSet(new HashSet<>());
|
||||
delegate = new MapCache<>(manager, name, cacheLoader, isReadThrough);
|
||||
}
|
||||
|
||||
/** Non-interface method; should only be called by UnitOfWork when merging to an enclosing UnitOfWork. */
|
||||
public Set<K> getDeletions() {
|
||||
return new HashSet<>(deletes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V get(K key) {
|
||||
if (deletes.contains(key)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return delegate.get(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<K, V> getAll(Set<? extends K> keys) {
|
||||
Set<? extends K> clonedKeys = new HashSet<>(keys);
|
||||
clonedKeys.removeAll(deletes);
|
||||
return delegate.getAll(clonedKeys);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsKey(K key) {
|
||||
if (deletes.contains(key)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return delegate.containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void loadAll(Set<? extends K> keys, boolean replaceExistingValues, CompletionListener listener) {
|
||||
Set<? extends K> clonedKeys = new HashSet<>(keys);
|
||||
clonedKeys.removeAll(deletes);
|
||||
delegate.loadAll(clonedKeys, replaceExistingValues, listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
if (deletes.contains(key)) {
|
||||
deletes.remove(key);
|
||||
}
|
||||
|
||||
delegate.put(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V getAndPut(K key, V value) {
|
||||
if (deletes.contains(key)) {
|
||||
deletes.remove(key);
|
||||
}
|
||||
|
||||
return delegate.getAndPut(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void putAll(Map<? extends K, ? extends V> map) {
|
||||
deletes.removeAll(map.keySet());
|
||||
delegate.putAll(map);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized boolean putIfAbsent(K key, V value) {
|
||||
if (!delegate.containsKey(key) && deletes.contains(key)) {
|
||||
deletes.remove(key);
|
||||
}
|
||||
|
||||
return delegate.putIfAbsent(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(K key) {
|
||||
boolean removed = delegate.remove(key);
|
||||
deletes.add(key);
|
||||
return removed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(K key, V value) {
|
||||
boolean removed = delegate.remove(key, value);
|
||||
if (removed) {
|
||||
deletes.add(key);
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V getAndRemove(K key) {
|
||||
V value = delegate.getAndRemove(key);
|
||||
deletes.add(key);
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeAll(Set<? extends K> keys) {
|
||||
Set<? extends K> cloneKeys = new HashSet<>(keys);
|
||||
delegate.removeAll(cloneKeys);
|
||||
deletes.addAll(cloneKeys);
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public synchronized void removeAll() {
|
||||
Map<K, V> impl = delegate.unwrap(Map.class);
|
||||
Set<K> keys = impl.keySet();
|
||||
delegate.removeAll();
|
||||
deletes.addAll(keys);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
delegate.clear();
|
||||
// TODO(gburd): all parents too
|
||||
deletes.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean replace(K key, V oldValue, V newValue) {
|
||||
if (deletes.contains(key)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return delegate.replace(key, oldValue, newValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean replace(K key, V value) {
|
||||
if (deletes.contains(key)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return delegate.replace(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V getAndReplace(K key, V value) {
|
||||
if (deletes.contains(key)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return delegate.getAndReplace(key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
|
||||
return delegate.getConfiguration(clazz);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T invoke(K key, EntryProcessor<K, V, T> processor, Object... arguments)
|
||||
throws EntryProcessorException {
|
||||
if (deletes.contains(key)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return delegate.invoke(key, processor, arguments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> processor,
|
||||
Object... arguments) {
|
||||
Set<? extends K> clonedKeys = new HashSet<>(keys);
|
||||
clonedKeys.removeAll(deletes);
|
||||
return delegate.invokeAll(clonedKeys, processor, arguments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return delegate.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheManager getCacheManager() {
|
||||
return delegate.getCacheManager();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
delegate.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isClosed() {
|
||||
return delegate.isClosed();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T unwrap(Class<T> clazz) {
|
||||
return delegate.unwrap(clazz);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void registerCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
|
||||
delegate.registerCacheEntryListener(cacheEntryListenerConfiguration);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deregisterCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
|
||||
delegate.deregisterCacheEntryListener(cacheEntryListenerConfiguration);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Entry<K, V>> iterator() {
|
||||
return delegate.iterator();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
26
src/main/java/net/helenus/core/UnitOfWorkImpl.java
Normal file
26
src/main/java/net/helenus/core/UnitOfWorkImpl.java
Normal file
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) {
|
||||
super(session, (AbstractUnitOfWork<HelenusException>) parent);
|
||||
}
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,11 @@
|
|||
*/
|
||||
package net.helenus.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.datastax.driver.core.UserType;
|
||||
import com.datastax.driver.core.schemabuilder.SchemaStatement;
|
||||
import java.util.List;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
|
@ -34,12 +35,12 @@ public final class UserTypeOperations {
|
|||
|
||||
public void createUserType(HelenusEntity entity) {
|
||||
|
||||
sessionOps.execute(SchemaUtil.createUserType(entity));
|
||||
sessionOps.execute(SchemaUtil.createUserType(entity), true);
|
||||
}
|
||||
|
||||
public void dropUserType(HelenusEntity entity) {
|
||||
|
||||
sessionOps.execute(SchemaUtil.dropUserType(entity));
|
||||
sessionOps.execute(SchemaUtil.dropUserType(entity), true);
|
||||
}
|
||||
|
||||
public void validateUserType(UserType userType, HelenusEntity entity) {
|
||||
|
@ -53,10 +54,7 @@ public final class UserTypeOperations {
|
|||
|
||||
if (!list.isEmpty()) {
|
||||
throw new HelenusException(
|
||||
"schema changed for entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ ", apply this command: "
|
||||
+ list);
|
||||
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,6 +70,8 @@ public final class UserTypeOperations {
|
|||
|
||||
private void executeBatch(List<SchemaStatement> list) {
|
||||
|
||||
list.forEach(s -> sessionOps.execute(s));
|
||||
list.forEach(s -> {
|
||||
sessionOps.execute(s, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -23,4 +22,5 @@ import java.lang.annotation.Target;
|
|||
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.TYPE)
|
||||
public @interface Cacheable {}
|
||||
public @interface Cacheable {
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -22,15 +21,14 @@ import java.lang.annotation.Retention;
|
|||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import net.helenus.core.ConflictingUnitOfWorkException;
|
||||
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.METHOD)
|
||||
public @interface Retry {
|
||||
|
||||
Class<? extends Exception>[] on() default {
|
||||
ConflictingUnitOfWorkException.class, TimeoutException.class
|
||||
};
|
||||
Class<? extends Exception>[] on() default {ConflictingUnitOfWorkException.class, TimeoutException.class};
|
||||
|
||||
int times() default 3;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -19,7 +18,7 @@ package net.helenus.core.aspect;
|
|||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import net.helenus.core.annotation.Retry;
|
||||
|
||||
import org.apache.commons.lang3.exception.ExceptionUtils;
|
||||
import org.aspectj.lang.ProceedingJoinPoint;
|
||||
import org.aspectj.lang.annotation.Around;
|
||||
|
@ -30,6 +29,8 @@ import org.slf4j.LoggerFactory;
|
|||
import org.springframework.core.annotation.AnnotationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import net.helenus.core.annotation.Retry;
|
||||
|
||||
@Aspect
|
||||
public class RetryAspect {
|
||||
|
||||
|
@ -54,8 +55,8 @@ public class RetryAspect {
|
|||
return tryProceeding(pjp, times, retryOn);
|
||||
}
|
||||
|
||||
private Object tryProceeding(
|
||||
ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
|
||||
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
|
||||
throws Throwable {
|
||||
try {
|
||||
return proceed(pjp);
|
||||
} catch (Throwable throwable) {
|
||||
|
|
|
@ -2,7 +2,7 @@ package net.helenus.core.aspect;
|
|||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import net.helenus.core.annotation.Retry;
|
||||
|
||||
import org.apache.commons.lang3.exception.ExceptionUtils;
|
||||
import org.aspectj.lang.ProceedingJoinPoint;
|
||||
import org.aspectj.lang.annotation.Around;
|
||||
|
@ -13,6 +13,8 @@ import org.slf4j.LoggerFactory;
|
|||
import org.springframework.core.annotation.AnnotationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import net.helenus.core.annotation.Retry;
|
||||
|
||||
@Aspect
|
||||
public class RetryConcurrentUnitOfWorkAspect {
|
||||
|
||||
|
@ -37,8 +39,8 @@ public class RetryConcurrentUnitOfWorkAspect {
|
|||
return tryProceeding(pjp, times, retryOn);
|
||||
}
|
||||
|
||||
private Object tryProceeding(
|
||||
ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
|
||||
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
|
||||
throws Throwable {
|
||||
try {
|
||||
return proceed(pjp);
|
||||
} catch (Throwable throwable) {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,45 +15,24 @@
|
|||
*/
|
||||
package net.helenus.core.cache;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
public class BoundFacet extends Facet<String> {
|
||||
private final Map<HelenusProperty, Object> properties;
|
||||
|
||||
public BoundFacet(HelenusProperty property, Object value) {
|
||||
super(property.getPropertyName(), value == null ? null : value.toString());
|
||||
this.properties = new HashMap<HelenusProperty, Object>(1);
|
||||
this.properties.put(property, value);
|
||||
}
|
||||
|
||||
public Set<HelenusProperty> getProperties() {
|
||||
return properties.keySet();
|
||||
}
|
||||
|
||||
public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
|
||||
super(
|
||||
name,
|
||||
BoundFacet(String name, Map<HelenusProperty, Object> properties) {
|
||||
super(name,
|
||||
(properties.keySet().size() > 1)
|
||||
? "["
|
||||
+ String.join(
|
||||
", ",
|
||||
properties
|
||||
.keySet()
|
||||
.stream()
|
||||
.map(key -> properties.get(key).toString())
|
||||
? "[" + String.join(", ",
|
||||
properties.keySet().stream().map(key -> properties.get(key).toString())
|
||||
.collect(Collectors.toSet()))
|
||||
+ "]"
|
||||
: String.join(
|
||||
"",
|
||||
properties
|
||||
.keySet()
|
||||
.stream()
|
||||
.map(key -> properties.get(key).toString())
|
||||
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
|
||||
.collect(Collectors.toSet())));
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
190
src/main/java/net/helenus/core/cache/CacheUtil.java
vendored
190
src/main/java/net/helenus/core/cache/CacheUtil.java
vendored
|
@ -1,23 +1,15 @@
|
|||
package net.helenus.core.cache;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.reflect.Entity;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.value.BeanColumnValueProvider;
|
||||
|
||||
public class CacheUtil {
|
||||
|
||||
public static List<String[]> combinations(List<String> items) {
|
||||
int n = items.size();
|
||||
if (n > 20) throw new IllegalArgumentException(n + " is out of range");
|
||||
if (n > 20 || n < 0)
|
||||
throw new IllegalArgumentException(n + " is out of range");
|
||||
long e = Math.round(Math.pow(2, n));
|
||||
List<String[]> out = new ArrayList<String[]>((int) e - 1);
|
||||
for (int k = 1; k <= items.size(); k++) {
|
||||
|
@ -26,8 +18,7 @@ public class CacheUtil {
|
|||
return out;
|
||||
}
|
||||
|
||||
private static void kCombinations(
|
||||
List<String> items, int n, int k, String[] arr, List<String[]> out) {
|
||||
private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
|
||||
if (k == 0) {
|
||||
out.add(arr.clone());
|
||||
} else {
|
||||
|
@ -38,184 +29,21 @@ public class CacheUtil {
|
|||
}
|
||||
}
|
||||
|
||||
public static List<String> flatKeys(String table, List<Facet> facets) {
|
||||
return flattenFacets(facets)
|
||||
.stream()
|
||||
.map(
|
||||
combination -> {
|
||||
return table + "." + Arrays.toString(combination);
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public static List<String[]> flattenFacets(List<Facet> facets) {
|
||||
List<String[]> combinations =
|
||||
CacheUtil.combinations(
|
||||
facets
|
||||
.stream()
|
||||
.filter(facet -> !facet.fixed())
|
||||
.filter(facet -> facet.value() != null)
|
||||
.map(
|
||||
facet -> {
|
||||
List<String[]> combinations = CacheUtil.combinations(
|
||||
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> {
|
||||
return facet.name() + "==" + facet.value();
|
||||
})
|
||||
.collect(Collectors.toList()));
|
||||
// TODO(gburd): rework so as to not generate the combinations at all rather than filter
|
||||
facets =
|
||||
facets
|
||||
.stream()
|
||||
.filter(f -> !f.fixed())
|
||||
.filter(f -> !f.alone() || !f.combined())
|
||||
.collect(Collectors.toList());
|
||||
for (Facet facet : facets) {
|
||||
combinations =
|
||||
combinations
|
||||
.stream()
|
||||
.filter(
|
||||
combo -> {
|
||||
// When used alone, this facet is not distinct so don't use it as a key.
|
||||
if (combo.length == 1) {
|
||||
if (!facet.alone() && combo[0].startsWith(facet.name() + "==")) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
if (!facet.combined()) {
|
||||
for (String c : combo) {
|
||||
// Don't use this facet in combination with others to create keys.
|
||||
if (c.startsWith(facet.name() + "==")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}).collect(Collectors.toList()));
|
||||
return combinations;
|
||||
}
|
||||
|
||||
/** Merge changed values in the map behind `from` into `to`. */
|
||||
public static Object merge(Object t, Object f) {
|
||||
HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(t));
|
||||
|
||||
if (t == f) return t;
|
||||
if (f == null) return t;
|
||||
if (t == null) return f;
|
||||
|
||||
if (t instanceof MapExportable
|
||||
&& t instanceof Entity
|
||||
&& f instanceof MapExportable
|
||||
&& f instanceof Entity) {
|
||||
Entity to = (Entity) t;
|
||||
Entity from = (Entity) f;
|
||||
Map<String, Object> toValueMap = ((MapExportable) to).toMap();
|
||||
Map<String, Object> fromValueMap = ((MapExportable) from).toMap();
|
||||
for (HelenusProperty prop : entity.getOrderedProperties()) {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
continue;
|
||||
default:
|
||||
Object toVal = BeanColumnValueProvider.INSTANCE.getColumnValue(to, -1, prop, false);
|
||||
Object fromVal = BeanColumnValueProvider.INSTANCE.getColumnValue(from, -1, prop, false);
|
||||
String ttlKey = ttlKey(prop);
|
||||
String writeTimeKey = writeTimeKey(prop);
|
||||
int[] toTtlI = (int[]) toValueMap.get(ttlKey);
|
||||
int toTtl = (toTtlI != null) ? toTtlI[0] : 0;
|
||||
Long toWriteTime = (Long) toValueMap.get(writeTimeKey);
|
||||
int[] fromTtlI = (int[]) fromValueMap.get(ttlKey);
|
||||
int fromTtl = (fromTtlI != null) ? fromTtlI[0] : 0;
|
||||
Long fromWriteTime = (Long) fromValueMap.get(writeTimeKey);
|
||||
|
||||
if (toVal != null) {
|
||||
if (fromVal != null) {
|
||||
if (toVal == fromVal) {
|
||||
// Case: object identity
|
||||
// Goal: ensure write time and ttl are also in sync
|
||||
if (fromWriteTime != null
|
||||
&& fromWriteTime != 0L
|
||||
&& (toWriteTime == null || fromWriteTime > toWriteTime)) {
|
||||
((MapExportable) to).put(writeTimeKey, fromWriteTime);
|
||||
}
|
||||
if (fromTtl > 0 && fromTtl > toTtl) {
|
||||
((MapExportable) to).put(ttlKey, fromTtl);
|
||||
}
|
||||
} else if (fromWriteTime != null && fromWriteTime != 0L) {
|
||||
// Case: to exists and from exists
|
||||
// Goal: copy over from -> to iff from.writeTime > to.writeTime
|
||||
if (toWriteTime != null && toWriteTime != 0L) {
|
||||
if (fromWriteTime > toWriteTime) {
|
||||
((MapExportable) to).put(prop.getPropertyName(), fromVal);
|
||||
((MapExportable) to).put(writeTimeKey, fromWriteTime);
|
||||
if (fromTtl > 0) {
|
||||
((MapExportable) to).put(ttlKey, fromTtl);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
((MapExportable) to).put(prop.getPropertyName(), fromVal);
|
||||
((MapExportable) to).put(writeTimeKey, fromWriteTime);
|
||||
if (fromTtl > 0) {
|
||||
((MapExportable) to).put(ttlKey, fromTtl);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (toWriteTime == null || toWriteTime == 0L) {
|
||||
// Caution, entering grey area...
|
||||
if (!toVal.equals(fromVal)) {
|
||||
// dangerous waters here, values diverge without information that enables resolution,
|
||||
// policy (for now) is to move value from -> to anyway.
|
||||
((MapExportable) to).put(prop.getPropertyName(), fromVal);
|
||||
if (fromTtl > 0) {
|
||||
((MapExportable) to).put(ttlKey, fromTtl);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Case: from exists, but to doesn't (it's null)
|
||||
// Goal: copy over from -> to, include ttl and writeTime if present
|
||||
if (fromVal != null) {
|
||||
((MapExportable) to).put(prop.getPropertyName(), fromVal);
|
||||
if (fromWriteTime != null && fromWriteTime != 0L) {
|
||||
((MapExportable) to).put(writeTimeKey, fromWriteTime);
|
||||
}
|
||||
if (fromTtl > 0) {
|
||||
((MapExportable) to).put(ttlKey, fromTtl);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return to;
|
||||
}
|
||||
return t;
|
||||
public static Object merge(Object to, Object from) {
|
||||
return to; // TODO(gburd): yeah...
|
||||
}
|
||||
|
||||
public static String schemaName(List<Facet> facets) {
|
||||
return facets
|
||||
.stream()
|
||||
.filter(Facet::fixed)
|
||||
.map(facet -> facet.value().toString())
|
||||
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
|
||||
.collect(Collectors.joining("."));
|
||||
}
|
||||
|
||||
public static String writeTimeKey(HelenusProperty prop) {
|
||||
return writeTimeKey(prop.getColumnName().toCql(false));
|
||||
}
|
||||
|
||||
public static String ttlKey(HelenusProperty prop) {
|
||||
return ttlKey(prop.getColumnName().toCql(false));
|
||||
}
|
||||
|
||||
public static String writeTimeKey(String columnName) {
|
||||
String key = "_" + columnName + "_writeTime";
|
||||
return key.toLowerCase();
|
||||
}
|
||||
|
||||
public static String ttlKey(String columnName) {
|
||||
String key = "_" + columnName + "_ttl";
|
||||
return key.toLowerCase();
|
||||
}
|
||||
}
|
||||
|
|
43
src/main/java/net/helenus/core/cache/CaffeineCache.java
vendored
Normal file
43
src/main/java/net/helenus/core/cache/CaffeineCache.java
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
public class CaffeineCache<K, V> implements SessionCache<K, V> {
|
||||
|
||||
|
||||
final Cache<K, V> cache;
|
||||
|
||||
CaffeineCache(Cache<K, V> cache) {
|
||||
this.cache = cache;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void invalidate(K key) {
|
||||
cache.invalidate(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V get(K key) {
|
||||
return cache.getIfPresent(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
cache.put(key, value);
|
||||
}
|
||||
|
||||
}
|
24
src/main/java/net/helenus/core/cache/Facet.java
vendored
24
src/main/java/net/helenus/core/cache/Facet.java
vendored
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,13 +16,13 @@
|
|||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
/** An Entity is identifiable via one or more Facets */
|
||||
/**
|
||||
* An Entity is identifiable via one or more Facets
|
||||
*/
|
||||
public class Facet<T> {
|
||||
private final String name;
|
||||
private T value;
|
||||
private boolean fixed = false;
|
||||
private boolean alone = true;
|
||||
private boolean combined = true;
|
||||
|
||||
public Facet(String name) {
|
||||
this.name = name;
|
||||
|
@ -51,19 +50,4 @@ public class Facet<T> {
|
|||
return fixed;
|
||||
}
|
||||
|
||||
public void setUniquelyIdentifyingWhenAlone(boolean alone) {
|
||||
this.alone = alone;
|
||||
}
|
||||
|
||||
public void setUniquelyIdentifyingWhenCombined(boolean combined) {
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public boolean alone() {
|
||||
return alone;
|
||||
}
|
||||
|
||||
public boolean combined() {
|
||||
return combined;
|
||||
}
|
||||
}
|
||||
|
|
44
src/main/java/net/helenus/core/cache/GuavaCache.java
vendored
Normal file
44
src/main/java/net/helenus/core/cache/GuavaCache.java
vendored
Normal file
|
@ -0,0 +1,44 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
import com.google.common.cache.Cache;
|
||||
|
||||
public class GuavaCache<K, V> implements SessionCache<K, V> {
|
||||
|
||||
final Cache<K, V> cache;
|
||||
|
||||
GuavaCache(Cache<K, V> cache) {
|
||||
this.cache = cache;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void invalidate(K key) {
|
||||
cache.invalidate(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V get(K key) {
|
||||
return cache.getIfPresent(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
cache.put(key, value);
|
||||
}
|
||||
|
||||
}
|
463
src/main/java/net/helenus/core/cache/MapCache.java
vendored
463
src/main/java/net/helenus/core/cache/MapCache.java
vendored
|
@ -1,463 +0,0 @@
|
|||
package net.helenus.core.cache;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import javax.cache.Cache;
|
||||
import javax.cache.CacheManager;
|
||||
import javax.cache.configuration.CacheEntryListenerConfiguration;
|
||||
import javax.cache.configuration.Configuration;
|
||||
import javax.cache.event.CacheEntryRemovedListener;
|
||||
import javax.cache.integration.CacheLoader;
|
||||
import javax.cache.integration.CompletionListener;
|
||||
import javax.cache.processor.EntryProcessor;
|
||||
import javax.cache.processor.EntryProcessorException;
|
||||
import javax.cache.processor.EntryProcessorResult;
|
||||
import javax.cache.processor.MutableEntry;
|
||||
|
||||
public class MapCache<K, V> implements Cache<K, V> {
|
||||
private final CacheManager manager;
|
||||
private final String name;
|
||||
private Map<K, V> map = new ConcurrentHashMap<>();
|
||||
private Set<CacheEntryRemovedListener<K, V>> cacheEntryRemovedListeners = new HashSet<>();
|
||||
private CacheLoader<K, V> cacheLoader = null;
|
||||
private boolean isReadThrough = false;
|
||||
|
||||
private static class MapConfiguration<K, V> implements Configuration<K, V> {
|
||||
private static final long serialVersionUID = 6093947542772516209L;
|
||||
|
||||
@Override
|
||||
public Class<K> getKeyType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<V> getValueType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isStoreByValue() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public MapCache(
|
||||
CacheManager manager, String name, CacheLoader<K, V> cacheLoader, boolean isReadThrough) {
|
||||
this.manager = manager;
|
||||
this.name = name;
|
||||
this.cacheLoader = cacheLoader;
|
||||
this.isReadThrough = isReadThrough;
|
||||
}
|
||||
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V get(K key) {
|
||||
V value = null;
|
||||
synchronized (map) {
|
||||
value = map.get(key);
|
||||
if (value == null && isReadThrough && cacheLoader != null) {
|
||||
V loadedValue = cacheLoader.load(key);
|
||||
if (loadedValue != null) {
|
||||
map.put(key, loadedValue);
|
||||
value = loadedValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public Map<K, V> getAll(Set<? extends K> keys) {
|
||||
Map<K, V> result = null;
|
||||
synchronized (map) {
|
||||
result = new HashMap<K, V>(keys.size());
|
||||
Iterator<? extends K> it = keys.iterator();
|
||||
while (it.hasNext()) {
|
||||
K key = it.next();
|
||||
V value = map.get(key);
|
||||
if (value != null) {
|
||||
result.put(key, value);
|
||||
it.remove();
|
||||
}
|
||||
}
|
||||
if (keys.size() != 0 && isReadThrough && cacheLoader != null) {
|
||||
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
|
||||
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
|
||||
V v = entry.getValue();
|
||||
if (v != null) {
|
||||
K k = entry.getKey();
|
||||
map.put(k, v);
|
||||
result.put(k, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean containsKey(K key) {
|
||||
return map.containsKey(key);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void loadAll(
|
||||
Set<? extends K> keys, boolean replaceExistingValues, CompletionListener completionListener) {
|
||||
if (cacheLoader != null) {
|
||||
try {
|
||||
synchronized (map) {
|
||||
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
|
||||
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
|
||||
V value = entry.getValue();
|
||||
K key = entry.getKey();
|
||||
if (value != null) {
|
||||
boolean existsCurrently = map.containsKey(key);
|
||||
if (!existsCurrently || replaceExistingValues) {
|
||||
map.put(key, value);
|
||||
keys.remove(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if (completionListener != null) {
|
||||
completionListener.onException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (completionListener != null) {
|
||||
if (keys.isEmpty()) {
|
||||
completionListener.onCompletion();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
map.put(key, value);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V getAndPut(K key, V value) {
|
||||
V result = null;
|
||||
synchronized (map) {
|
||||
result = map.get(key);
|
||||
if (result == null && isReadThrough && cacheLoader != null) {
|
||||
V loadedValue = cacheLoader.load(key);
|
||||
if (loadedValue != null) {
|
||||
result = loadedValue;
|
||||
}
|
||||
}
|
||||
map.put(key, value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void putAll(Map<? extends K, ? extends V> map) {
|
||||
synchronized (map) {
|
||||
for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
|
||||
this.map.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean putIfAbsent(K key, V value) {
|
||||
synchronized (map) {
|
||||
if (!map.containsKey(key)) {
|
||||
map.put(key, value);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean remove(K key) {
|
||||
boolean removed = false;
|
||||
synchronized (map) {
|
||||
removed = map.remove(key) != null;
|
||||
notifyRemovedListeners(key);
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean remove(K key, V oldValue) {
|
||||
synchronized (map) {
|
||||
V value = map.get(key);
|
||||
if (value != null && oldValue.equals(value)) {
|
||||
map.remove(key);
|
||||
notifyRemovedListeners(key);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V getAndRemove(K key) {
|
||||
synchronized (map) {
|
||||
V oldValue = null;
|
||||
oldValue = map.get(key);
|
||||
map.remove(key);
|
||||
notifyRemovedListeners(key);
|
||||
return oldValue;
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean replace(K key, V oldValue, V newValue) {
|
||||
synchronized (map) {
|
||||
V value = map.get(key);
|
||||
if (value != null && oldValue.equals(value)) {
|
||||
map.put(key, newValue);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean replace(K key, V value) {
|
||||
synchronized (map) {
|
||||
if (map.containsKey(key)) {
|
||||
map.put(key, value);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V getAndReplace(K key, V value) {
|
||||
synchronized (map) {
|
||||
V oldValue = map.get(key);
|
||||
if (value != null && value.equals(oldValue)) {
|
||||
map.put(key, value);
|
||||
return oldValue;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void removeAll(Set<? extends K> keys) {
|
||||
synchronized (map) {
|
||||
Iterator<? extends K> it = keys.iterator();
|
||||
while (it.hasNext()) {
|
||||
K key = it.next();
|
||||
if (map.containsKey(key)) {
|
||||
map.remove(key);
|
||||
} else {
|
||||
it.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
notifyRemovedListeners(keys);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void removeAll() {
|
||||
synchronized (map) {
|
||||
Set<K> keys = map.keySet();
|
||||
map.clear();
|
||||
notifyRemovedListeners(keys);
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void clear() {
|
||||
map.clear();
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
|
||||
if (!MapConfiguration.class.isAssignableFrom(clazz)) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public <T> T invoke(K key, EntryProcessor<K, V, T> entryProcessor, Object... arguments)
|
||||
throws EntryProcessorException {
|
||||
// TODO
|
||||
return null;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public <T> Map<K, EntryProcessorResult<T>> invokeAll(
|
||||
Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... arguments) {
|
||||
synchronized (map) {
|
||||
for (K key : keys) {
|
||||
V value = map.get(key);
|
||||
if (value != null) {
|
||||
entryProcessor.process(
|
||||
new MutableEntry<K, V>() {
|
||||
@Override
|
||||
public boolean exists() {
|
||||
return map.containsKey(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
synchronized (map) {
|
||||
V value = map.get(key);
|
||||
if (value != null) {
|
||||
map.remove(key);
|
||||
notifyRemovedListeners(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public K getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V getValue() {
|
||||
return map.get(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T unwrap(Class<T> clazz) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setValue(V value) {
|
||||
map.put(key, value);
|
||||
}
|
||||
},
|
||||
arguments);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public CacheManager getCacheManager() {
|
||||
return manager;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void close() {}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean isClosed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T unwrap(Class<T> clazz) {
|
||||
if (Map.class.isAssignableFrom(clazz)) {
|
||||
return (T) map;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void registerCacheEntryListener(
|
||||
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
|
||||
//cacheEntryRemovedListeners.add(cacheEntryListenerConfiguration.getCacheEntryListenerFactory().create());
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void deregisterCacheEntryListener(
|
||||
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public Iterator<Entry<K, V>> iterator() {
|
||||
synchronized (map) {
|
||||
return new Iterator<Entry<K, V>>() {
|
||||
|
||||
Iterator<Map.Entry<K, V>> entries = map.entrySet().iterator();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return entries.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Entry<K, V> next() {
|
||||
Map.Entry<K, V> entry = entries.next();
|
||||
return new Entry<K, V>() {
|
||||
K key = entry.getKey();
|
||||
V value = entry.getValue();
|
||||
|
||||
@Override
|
||||
public K getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
@Override
|
||||
public V getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T unwrap(Class<T> clazz) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyRemovedListeners(K key) {
|
||||
// if (cacheEntryRemovedListeners != null) {
|
||||
// cacheEntryRemovedListeners.forEach(listener -> listener.onRemoved())
|
||||
// }
|
||||
}
|
||||
|
||||
private void notifyRemovedListeners(Set<? extends K> keys) {}
|
||||
}
|
41
src/main/java/net/helenus/core/cache/MemcacheDbCache.java
vendored
Normal file
41
src/main/java/net/helenus/core/cache/MemcacheDbCache.java
vendored
Normal file
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
public class MemcacheDbCache<K, V> implements SessionCache<K, V> {
|
||||
//final Cache<K, V> cache;
|
||||
|
||||
MemcacheDbCache() {
|
||||
//this.cache = cache;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void invalidate(K key) {
|
||||
//cache.invalidate(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V get(K key) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
//cache.put(key, value);
|
||||
}
|
||||
|
||||
}
|
41
src/main/java/net/helenus/core/cache/RedisCache.java
vendored
Normal file
41
src/main/java/net/helenus/core/cache/RedisCache.java
vendored
Normal file
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
public class RedisCache<K, V> implements SessionCache<K, V> {
|
||||
//final Cache<K, V> cache;
|
||||
|
||||
RedisCache() {
|
||||
//this.cache = cache;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void invalidate(K key) {
|
||||
//cache.invalidate(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public V get(K key) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
//cache.put(key, value);
|
||||
}
|
||||
|
||||
}
|
36
src/main/java/net/helenus/core/cache/SessionCache.java
vendored
Normal file
36
src/main/java/net/helenus/core/cache/SessionCache.java
vendored
Normal file
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package net.helenus.core.cache;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public interface SessionCache<K, V> {
|
||||
|
||||
static <K, V> SessionCache<K, V> defaultCache() {
|
||||
int MAX_CACHE_SIZE = 10000;
|
||||
int MAX_CACHE_EXPIRE_SECONDS = 600;
|
||||
return new GuavaCache<K, V>(CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
|
||||
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS)
|
||||
.expireAfterWrite(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build());
|
||||
}
|
||||
|
||||
void invalidate(K key);
|
||||
V get(K key);
|
||||
void put(K key, V value);
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,36 +19,23 @@ import java.util.ArrayList;
|
|||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import net.helenus.core.SchemaUtil;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
public class UnboundFacet extends Facet<String> {
|
||||
|
||||
private final List<HelenusProperty> properties;
|
||||
private final boolean alone;
|
||||
private final boolean combined;
|
||||
|
||||
public UnboundFacet(List<HelenusProperty> properties, boolean alone, boolean combined) {
|
||||
super(SchemaUtil.createPrimaryKeyPhrase(properties));
|
||||
this.properties = properties;
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public UnboundFacet(List<HelenusProperty> properties) {
|
||||
this(properties, true, true);
|
||||
}
|
||||
|
||||
public UnboundFacet(HelenusProperty property, boolean alone, boolean combined) {
|
||||
super(property.getPropertyName());
|
||||
properties = new ArrayList<HelenusProperty>();
|
||||
properties.add(property);
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
super(SchemaUtil.createPrimaryKeyPhrase(properties));
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
public UnboundFacet(HelenusProperty property) {
|
||||
this(property, true, true);
|
||||
super(property.getPropertyName());
|
||||
properties = new ArrayList<HelenusProperty>();
|
||||
properties.add(property);
|
||||
}
|
||||
|
||||
public List<HelenusProperty> getProperties() {
|
||||
|
@ -57,22 +43,18 @@ public class UnboundFacet extends Facet<String> {
|
|||
}
|
||||
|
||||
public Binder binder() {
|
||||
return new Binder(name(), properties, alone, combined);
|
||||
return new Binder(name(), properties);
|
||||
}
|
||||
|
||||
public static class Binder {
|
||||
|
||||
private final String name;
|
||||
private final boolean alone;
|
||||
private final boolean combined;
|
||||
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
|
||||
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
|
||||
|
||||
Binder(String name, List<HelenusProperty> properties, boolean alone, boolean combined) {
|
||||
Binder(String name, List<HelenusProperty> properties) {
|
||||
this.name = name;
|
||||
this.properties.addAll(properties);
|
||||
this.alone = alone;
|
||||
this.combined = combined;
|
||||
}
|
||||
|
||||
public Binder setValueForProperty(HelenusProperty prop, Object value) {
|
||||
|
@ -86,10 +68,7 @@ public class UnboundFacet extends Facet<String> {
|
|||
}
|
||||
|
||||
public BoundFacet bind() {
|
||||
BoundFacet facet = new BoundFacet(name, boundProperties);
|
||||
facet.setUniquelyIdentifyingWhenAlone(alone);
|
||||
facet.setUniquelyIdentifyingWhenCombined(combined);
|
||||
return facet;
|
||||
return new BoundFacet(name, boundProperties);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,14 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
|
||||
extends AbstractOperation<E, O> {
|
||||
extends
|
||||
AbstractOperation<E, O> {
|
||||
|
||||
protected List<Filter<?>> filters = null;
|
||||
protected List<Filter<?>> ifFilters = null;
|
||||
|
@ -110,59 +108,4 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
|
|||
ifFilters.add(filter);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isIdempotentOperation() {
|
||||
if (filters == null) {
|
||||
return super.isIdempotentOperation();
|
||||
}
|
||||
|
||||
return filters
|
||||
.stream()
|
||||
.anyMatch(
|
||||
filter -> {
|
||||
HelenusPropertyNode node = filter.getNode();
|
||||
if (node != null) {
|
||||
HelenusProperty prop = node.getProperty();
|
||||
if (prop != null) {
|
||||
return prop.isIdempotent();
|
||||
}
|
||||
}
|
||||
return false;
|
||||
})
|
||||
|| super.isIdempotentOperation();
|
||||
}
|
||||
|
||||
protected List<Facet> bindFacetValues(List<Facet> facets) {
|
||||
if (facets == null) {
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size());
|
||||
filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f));
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
if (filters != null) {
|
||||
for (HelenusProperty prop : unboundFacet.getProperties()) {
|
||||
|
||||
Filter filter = filterMap.get(prop);
|
||||
if (filter != null) {
|
||||
Object[] postulates = filter.postulateValues();
|
||||
for (Object p : postulates) {
|
||||
binder.setValueForProperty(prop, p.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
} else {
|
||||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
return boundFacets;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,12 +19,13 @@ import java.util.LinkedHashMap;
|
|||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
public abstract class AbstractFilterOptionalOperation<
|
||||
E, O extends AbstractFilterOptionalOperation<E, O>>
|
||||
extends AbstractOptionalOperation<E, O> {
|
||||
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
|
||||
extends
|
||||
AbstractOptionalOperation<E, O> {
|
||||
|
||||
protected Map<HelenusProperty, Filter<?>> filters = null;
|
||||
protected List<Filter<?>> ifFilters = null;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,12 +19,13 @@ import java.util.LinkedHashMap;
|
|||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
public abstract class AbstractFilterStreamOperation<
|
||||
E, O extends AbstractFilterStreamOperation<E, O>>
|
||||
extends AbstractStreamOperation<E, O> {
|
||||
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
|
||||
extends
|
||||
AbstractStreamOperation<E, O> {
|
||||
|
||||
protected Map<HelenusProperty, Filter<?>> filters = null;
|
||||
protected List<Filter<?>> ifFilters = null;
|
||||
|
@ -43,7 +43,7 @@ public abstract class AbstractFilterStreamOperation<
|
|||
|
||||
public <V> O where(Getter<V> getter, Operator operator, V val) {
|
||||
|
||||
if (val != null) addFilter(Filter.create(getter, operator, val));
|
||||
addFilter(Filter.create(getter, operator, val));
|
||||
|
||||
return (O) this;
|
||||
}
|
||||
|
@ -64,7 +64,7 @@ public abstract class AbstractFilterStreamOperation<
|
|||
|
||||
public <V> O and(Getter<V> getter, Operator operator, V val) {
|
||||
|
||||
if (val != null) addFilter(Filter.create(getter, operator, val));
|
||||
addFilter(Filter.create(getter, operator, val));
|
||||
|
||||
return (O) this;
|
||||
}
|
||||
|
@ -85,7 +85,7 @@ public abstract class AbstractFilterStreamOperation<
|
|||
|
||||
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
|
||||
|
||||
if (val != null) addIfFilter(Filter.create(getter, operator, val));
|
||||
addIfFilter(Filter.create(getter, operator, val));
|
||||
|
||||
return (O) this;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,16 +15,17 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
|
||||
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
||||
extends AbstractStatementOperation<E, O> {
|
||||
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {
|
||||
|
||||
public AbstractOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
@ -40,9 +40,8 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
public E sync() throws TimeoutException {
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
|
||||
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, false);
|
||||
return transform(resultSet);
|
||||
} finally {
|
||||
context.stop();
|
||||
|
@ -50,12 +49,13 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
}
|
||||
|
||||
public E sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) return sync();
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
ResultSet resultSet =
|
||||
execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
E result = transform(resultSet);
|
||||
return result;
|
||||
} finally {
|
||||
|
@ -64,8 +64,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
}
|
||||
|
||||
public CompletableFuture<E> async() {
|
||||
return CompletableFuture.<E>supplyAsync(
|
||||
() -> {
|
||||
return CompletableFuture.<E>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
|
@ -75,17 +74,14 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
|
|||
}
|
||||
|
||||
public CompletableFuture<E> async(UnitOfWork uow) {
|
||||
if (uow == null) return async();
|
||||
CompletableFuture<E> f =
|
||||
CompletableFuture.<E>supplyAsync(
|
||||
() -> {
|
||||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<E>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
uow.addFuture(f);
|
||||
return f;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,12 @@ package net.helenus.core.operation;
|
|||
|
||||
import static net.helenus.core.HelenusSession.deleted;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.PreparedStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
|
@ -25,23 +30,15 @@ import com.google.common.base.Function;
|
|||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.util.concurrent.Futures;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.support.Fun;
|
||||
import org.apache.commons.lang3.SerializationUtils;
|
||||
|
||||
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
|
||||
extends AbstractStatementOperation<E, O> {
|
||||
extends
|
||||
AbstractStatementOperation<E, O> {
|
||||
|
||||
public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
@ -55,8 +52,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
|
||||
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
|
||||
final O _this = (O) this;
|
||||
return Futures.transform(
|
||||
prepareStatementAsync(),
|
||||
return Futures.transform(prepareStatementAsync(),
|
||||
new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
|
||||
@Override
|
||||
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
|
||||
|
@ -70,12 +66,10 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
try {
|
||||
Optional<E> result = Optional.empty();
|
||||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable() && !ignoreCache();
|
||||
boolean updateCache = isSessionCacheable();
|
||||
|
||||
if (updateCache) {
|
||||
if (enableCache && isSessionCacheable()) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null && facets.size() > 0) {
|
||||
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cacheResult != null) {
|
||||
|
@ -88,35 +82,20 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
cacheMiss.mark();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//TODO(gburd): look in statement cache for results
|
||||
}
|
||||
}
|
||||
|
||||
if (!result.isPresent()) {
|
||||
// Formulate the query and execute it against the Cassandra cluster.
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps,
|
||||
null,
|
||||
queryExecutionTimeout,
|
||||
queryTimeoutUnits,
|
||||
showValues,
|
||||
isSessionCacheable());
|
||||
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
|
||||
queryTimeoutUnits, showValues, false);
|
||||
|
||||
// Transform the query result set into the desired shape.
|
||||
result = transform(resultSet);
|
||||
}
|
||||
|
||||
if (updateCache && result.isPresent()) {
|
||||
E r = result.get();
|
||||
Class<?> resultClass = r.getClass();
|
||||
if (!(resultClass.getEnclosingClass() != null
|
||||
&& resultClass.getEnclosingClass() == Fun.class)) {
|
||||
List<Facet> facets = getFacets();
|
||||
if (facets != null && facets.size() > 1) {
|
||||
sessionOps.updateCache(r, facets);
|
||||
}
|
||||
sessionOps.updateCache(result.get(), facets);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
|
@ -125,8 +104,9 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
}
|
||||
}
|
||||
|
||||
public Optional<E> sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) return sync();
|
||||
public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException {
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
|
@ -135,12 +115,11 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
E cachedResult = null;
|
||||
final boolean updateCache;
|
||||
|
||||
if (!ignoreCache()) {
|
||||
if (enableCache) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null && facets.size() > 0) {
|
||||
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
|
||||
if (facets != null) {
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
updateCache = false;
|
||||
|
@ -149,41 +128,23 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
uowCacheMiss.mark();
|
||||
if (isSessionCacheable()) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cachedResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cachedResult != null) {
|
||||
Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
|
||||
if (Helenus.entity(iface).isDraftable()) {
|
||||
result = Optional.of(cachedResult);
|
||||
} else {
|
||||
result =
|
||||
Optional.of(
|
||||
(E)
|
||||
SerializationUtils.<Serializable>clone(
|
||||
(Serializable) cachedResult));
|
||||
}
|
||||
updateCache = false;
|
||||
sessionCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//TODO(gburd): look in statement cache for results
|
||||
updateCache = false; //true;
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
|
@ -205,8 +166,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
} else {
|
||||
|
||||
// Formulate the query and execute it against the Cassandra cluster.
|
||||
ResultSet resultSet =
|
||||
execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
|
||||
// Transform the query result set into the desired shape.
|
||||
result = transform(resultSet);
|
||||
|
@ -214,11 +175,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
|
||||
// If we have a result, it wasn't from the UOW cache, and we're caching things
|
||||
// then we need to put this result into the cache for future requests to find.
|
||||
if (updateCache && result.isPresent()) {
|
||||
E r = result.get();
|
||||
if (!(r instanceof Fun) && r != deleted) {
|
||||
cacheUpdate(uow, r, getFacets());
|
||||
}
|
||||
if (updateCache && result.isPresent() && result.get() != deleted) {
|
||||
cacheUpdate(uow, result.get(), getFacets());
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@ -228,8 +186,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
}
|
||||
|
||||
public CompletableFuture<Optional<E>> async() {
|
||||
return CompletableFuture.<Optional<E>>supplyAsync(
|
||||
() -> {
|
||||
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
|
@ -238,18 +195,15 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
|
|||
});
|
||||
}
|
||||
|
||||
public CompletableFuture<Optional<E>> async(UnitOfWork uow) {
|
||||
if (uow == null) return async();
|
||||
CompletableFuture<Optional<E>> f =
|
||||
CompletableFuture.<Optional<E>>supplyAsync(
|
||||
() -> {
|
||||
public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
|
||||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<Optional<E>>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
uow.addFuture(f);
|
||||
return f;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,6 +15,12 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import com.datastax.driver.core.ConsistencyLevel;
|
||||
import com.datastax.driver.core.PreparedStatement;
|
||||
import com.datastax.driver.core.RegularStatement;
|
||||
|
@ -26,11 +31,9 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
|
|||
import com.datastax.driver.core.policies.RetryPolicy;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import brave.Tracer;
|
||||
import brave.propagation.TraceContext;
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
@ -40,16 +43,20 @@ import net.helenus.mapping.HelenusProperty;
|
|||
import net.helenus.mapping.value.BeanColumnValueProvider;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
|
||||
extends Operation<E> {
|
||||
private boolean ignoreCache = false;
|
||||
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {
|
||||
|
||||
protected boolean enableCache = true;
|
||||
protected boolean showValues = true;
|
||||
protected TraceContext traceContext;
|
||||
long queryExecutionTimeout = 10;
|
||||
TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
|
||||
private ConsistencyLevel consistencyLevel;
|
||||
private ConsistencyLevel serialConsistencyLevel;
|
||||
private RetryPolicy retryPolicy;
|
||||
private boolean idempotent = false;
|
||||
private boolean enableTracing = false;
|
||||
private long[] defaultTimestamp = null;
|
||||
private int[] fetchSize = null;
|
||||
protected boolean idempotent = false;
|
||||
|
||||
public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
@ -59,13 +66,13 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
|
||||
public abstract Statement buildStatement(boolean cached);
|
||||
|
||||
public O uncached(boolean enabled) {
|
||||
ignoreCache = !enabled;
|
||||
public O ignoreCache(boolean enabled) {
|
||||
enableCache = enabled;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public O uncached() {
|
||||
ignoreCache = true;
|
||||
public O ignoreCache() {
|
||||
enableCache = true;
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
|
@ -246,16 +253,22 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
statement.setFetchSize(fetchSize[0]);
|
||||
}
|
||||
|
||||
if (isIdempotentOperation()) {
|
||||
if (idempotent) {
|
||||
statement.setIdempotent(true);
|
||||
}
|
||||
|
||||
return statement;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isIdempotentOperation() {
|
||||
return idempotent;
|
||||
public O zipkinContext(TraceContext traceContext) {
|
||||
if (traceContext != null) {
|
||||
Tracer tracer = this.sessionOps.getZipkinTracer();
|
||||
if (tracer != null) {
|
||||
this.traceContext = traceContext;
|
||||
}
|
||||
}
|
||||
|
||||
return (O) this;
|
||||
}
|
||||
|
||||
public Statement statement() {
|
||||
|
@ -264,7 +277,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
|
||||
public String cql() {
|
||||
Statement statement = buildStatement(false);
|
||||
if (statement == null) return "";
|
||||
if (statement == null)
|
||||
return "";
|
||||
if (statement instanceof BuiltStatement) {
|
||||
BuiltStatement buildStatement = (BuiltStatement) statement;
|
||||
return buildStatement.setForceNoValues(true).getQueryString();
|
||||
|
@ -301,11 +315,7 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
throw new HelenusException("only RegularStatements can be prepared");
|
||||
}
|
||||
|
||||
protected boolean ignoreCache() {
|
||||
return ignoreCache;
|
||||
}
|
||||
|
||||
protected E checkCache(UnitOfWork uow, List<Facet> facets) {
|
||||
protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
|
||||
E result = null;
|
||||
Optional<Object> optionalCachedResult = Optional.empty();
|
||||
|
||||
|
@ -319,10 +329,9 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
return result;
|
||||
}
|
||||
|
||||
protected Object cacheUpdate(UnitOfWork uow, E pojo, List<Facet> identifyingFacets) {
|
||||
protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
|
||||
List<Facet> facets = new ArrayList<>();
|
||||
Map<String, Object> valueMap =
|
||||
pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
|
||||
|
||||
for (Facet facet : identifyingFacets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
|
@ -351,6 +360,6 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
|
|||
}
|
||||
|
||||
// Cache the value (pojo), the statement key, and the fully bound facets.
|
||||
return uow.cacheUpdate(pojo, facets);
|
||||
uow.cacheUpdate(pojo, facets);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,13 @@ package net.helenus.core.operation;
|
|||
|
||||
import static net.helenus.core.HelenusSession.deleted;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.PreparedStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
|
@ -25,24 +31,15 @@ import com.google.common.base.Function;
|
|||
import com.google.common.base.Stopwatch;
|
||||
import com.google.common.util.concurrent.Futures;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.CompletionException;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.support.Fun;
|
||||
import org.apache.commons.lang3.SerializationUtils;
|
||||
|
||||
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
|
||||
extends AbstractStatementOperation<E, O> {
|
||||
extends
|
||||
AbstractStatementOperation<E, O> {
|
||||
|
||||
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
@ -56,8 +53,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
|
||||
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
|
||||
final O _this = (O) this;
|
||||
return Futures.transform(
|
||||
prepareStatementAsync(),
|
||||
return Futures.transform(prepareStatementAsync(),
|
||||
new Function<PreparedStatement, PreparedStreamOperation<E>>() {
|
||||
@Override
|
||||
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
|
||||
|
@ -73,10 +69,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
E cacheResult = null;
|
||||
boolean updateCache = isSessionCacheable();
|
||||
|
||||
if (!ignoreCache() && isSessionCacheable()) {
|
||||
if (enableCache && isSessionCacheable()) {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null && facets.size() > 0) {
|
||||
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cacheResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cacheResult != null) {
|
||||
|
@ -88,22 +82,12 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
}
|
||||
} else {
|
||||
//TODO(gburd): look in statement cache for results
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (resultStream == null) {
|
||||
// Formulate the query and execute it against the Cassandra cluster.
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps,
|
||||
null,
|
||||
queryExecutionTimeout,
|
||||
queryTimeoutUnits,
|
||||
showValues,
|
||||
isSessionCacheable());
|
||||
ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
|
||||
queryTimeoutUnits, showValues, false);
|
||||
|
||||
// Transform the query result set into the desired shape.
|
||||
resultStream = transform(resultSet);
|
||||
|
@ -113,13 +97,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
List<Facet> facets = getFacets();
|
||||
if (facets != null && facets.size() > 1) {
|
||||
List<E> again = new ArrayList<>();
|
||||
resultStream.forEach(
|
||||
result -> {
|
||||
Class<?> resultClass = result.getClass();
|
||||
if (!(resultClass.getEnclosingClass() != null
|
||||
&& resultClass.getEnclosingClass() == Fun.class)) {
|
||||
resultStream.forEach(result -> {
|
||||
sessionOps.updateCache(result, facets);
|
||||
}
|
||||
again.add(result);
|
||||
});
|
||||
resultStream = again.stream();
|
||||
|
@ -133,7 +112,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
}
|
||||
|
||||
public Stream<E> sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) return sync();
|
||||
if (uow == null)
|
||||
return sync();
|
||||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
|
@ -141,12 +121,11 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
E cachedResult = null;
|
||||
final boolean updateCache;
|
||||
|
||||
if (!ignoreCache()) {
|
||||
if (enableCache) {
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
List<Facet> facets = bindFacetValues();
|
||||
if (facets != null && facets.size() > 0) {
|
||||
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
|
||||
if (facets != null) {
|
||||
cachedResult = checkCache(uow, facets);
|
||||
if (cachedResult != null) {
|
||||
updateCache = false;
|
||||
|
@ -155,40 +134,23 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
uowCacheMiss.mark();
|
||||
if (isSessionCacheable()) {
|
||||
String tableName = CacheUtil.schemaName(facets);
|
||||
cachedResult = (E) sessionOps.checkCache(tableName, facets);
|
||||
if (cachedResult != null) {
|
||||
Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
|
||||
E result = null;
|
||||
if (Helenus.entity(iface).isDraftable()) {
|
||||
result = cachedResult;
|
||||
} else {
|
||||
result =
|
||||
(E) SerializationUtils.<Serializable>clone((Serializable) cachedResult);
|
||||
}
|
||||
updateCache = false;
|
||||
resultStream = Stream.of(result);
|
||||
resultStream = Stream.of(cachedResult);
|
||||
sessionCacheHits.mark();
|
||||
cacheHits.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(1, 0);
|
||||
} else {
|
||||
updateCache = true;
|
||||
sessionCacheMiss.mark();
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//TODO(gburd): look in statement cache for results
|
||||
updateCache = false; //true;
|
||||
cacheMiss.mark();
|
||||
uow.recordCacheAndDatabaseOperationCount(-1, 0);
|
||||
}
|
||||
} else {
|
||||
updateCache = false;
|
||||
}
|
||||
|
@ -202,30 +164,26 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
|
||||
// Check to see if we fetched the object from the cache
|
||||
if (resultStream == null) {
|
||||
ResultSet resultSet =
|
||||
execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
|
||||
ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
|
||||
showValues, true);
|
||||
resultStream = transform(resultSet);
|
||||
}
|
||||
|
||||
// If we have a result and we're caching then we need to put it into the cache
|
||||
// for future requests to find.
|
||||
if (resultStream != null) {
|
||||
if (updateCache) {
|
||||
List<E> again = new ArrayList<>();
|
||||
List<Facet> facets = getFacets();
|
||||
resultStream.forEach(
|
||||
result -> {
|
||||
Class<?> resultClass = result.getClass();
|
||||
if (result != deleted
|
||||
&& !(resultClass.getEnclosingClass() != null
|
||||
&& resultClass.getEnclosingClass() == Fun.class)) {
|
||||
result = (E) cacheUpdate(uow, result, facets);
|
||||
resultStream.forEach(result -> {
|
||||
if (result != deleted) {
|
||||
if (updateCache) {
|
||||
cacheUpdate(uow, result, facets);
|
||||
}
|
||||
again.add(result);
|
||||
}
|
||||
});
|
||||
resultStream = again.stream();
|
||||
}
|
||||
}
|
||||
|
||||
return resultStream;
|
||||
} finally {
|
||||
|
@ -234,8 +192,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async() {
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(
|
||||
() -> {
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
|
@ -245,17 +202,14 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
|
|||
}
|
||||
|
||||
public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
|
||||
if (uow == null) return async();
|
||||
CompletableFuture<Stream<E>> f =
|
||||
CompletableFuture.<Stream<E>>supplyAsync(
|
||||
() -> {
|
||||
if (uow == null)
|
||||
return async();
|
||||
return CompletableFuture.<Stream<E>>supplyAsync(() -> {
|
||||
try {
|
||||
return sync();
|
||||
} catch (TimeoutException ex) {
|
||||
throw new CompletionException(ex);
|
||||
}
|
||||
});
|
||||
uow.addFuture(f);
|
||||
return f;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,140 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.AtomicMonotonicTimestampGenerator;
|
||||
import com.datastax.driver.core.BatchStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.TimestampGenerator;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Collectors;
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
public class BatchOperation extends Operation<Long> {
|
||||
//TODO(gburd): find the way to get the driver's timestamp generator
|
||||
private static final TimestampGenerator timestampGenerator =
|
||||
new AtomicMonotonicTimestampGenerator();
|
||||
|
||||
private final BatchStatement batch;
|
||||
private List<AbstractOperation<?, ?>> operations = new ArrayList<AbstractOperation<?, ?>>();
|
||||
private boolean logged = true;
|
||||
|
||||
public BatchOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
batch = new BatchStatement();
|
||||
}
|
||||
|
||||
public void add(AbstractOperation<?, ?> operation) {
|
||||
operations.add(operation);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BatchStatement buildStatement(boolean cached) {
|
||||
batch.addAll(
|
||||
operations.stream().map(o -> o.buildStatement(cached)).collect(Collectors.toList()));
|
||||
batch.setConsistencyLevel(sessionOps.getDefaultConsistencyLevel());
|
||||
return batch;
|
||||
}
|
||||
|
||||
public BatchOperation logged() {
|
||||
logged = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public BatchOperation setLogged(boolean logStatements) {
|
||||
logged = logStatements;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Long sync() {
|
||||
if (operations.size() == 0) return 0L;
|
||||
final Timer.Context context = requestLatency.time();
|
||||
try {
|
||||
batch.setDefaultTimestamp(timestampGenerator.next());
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
|
||||
if (!resultSet.wasApplied()) {
|
||||
throw new HelenusException("Failed to apply batch.");
|
||||
}
|
||||
} catch (TimeoutException e) {
|
||||
throw new HelenusException(e);
|
||||
} finally {
|
||||
context.stop();
|
||||
}
|
||||
return batch.getDefaultTimestamp();
|
||||
}
|
||||
|
||||
public Long sync(UnitOfWork uow) {
|
||||
if (operations.size() == 0) return 0L;
|
||||
if (uow == null) return sync();
|
||||
|
||||
final Timer.Context context = requestLatency.time();
|
||||
final Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
uow.recordCacheAndDatabaseOperationCount(0, 1);
|
||||
batch.setDefaultTimestamp(timestampGenerator.next());
|
||||
ResultSet resultSet =
|
||||
this.execute(
|
||||
sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
|
||||
if (!resultSet.wasApplied()) {
|
||||
throw new HelenusException("Failed to apply batch.");
|
||||
}
|
||||
} catch (TimeoutException e) {
|
||||
throw new HelenusException(e);
|
||||
} finally {
|
||||
context.stop();
|
||||
timer.stop();
|
||||
}
|
||||
uow.addDatabaseTime("Cassandra", timer);
|
||||
return batch.getDefaultTimestamp();
|
||||
}
|
||||
|
||||
public void addAll(BatchOperation batch) {
|
||||
batch.operations.forEach(o -> this.operations.add(o));
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return toString(true); //TODO(gburd): sessionOps.showQueryValues()
|
||||
}
|
||||
|
||||
public String toString(boolean showValues) {
|
||||
StringBuilder s = new StringBuilder();
|
||||
s.append("BEGIN ");
|
||||
if (!logged) {
|
||||
s.append("UNLOGGED ");
|
||||
}
|
||||
s.append("BATCH ");
|
||||
|
||||
if (batch.getDefaultTimestamp() > -9223372036854775808L) {
|
||||
s.append("USING TIMESTAMP ").append(String.valueOf(batch.getDefaultTimestamp())).append(" ");
|
||||
}
|
||||
s.append(
|
||||
operations
|
||||
.stream()
|
||||
.map(o -> Operation.queryString(o.buildStatement(showValues), showValues))
|
||||
.collect(Collectors.joining(" ")));
|
||||
s.append(" APPLY BATCH;");
|
||||
return s.toString();
|
||||
}
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,19 +15,18 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.BoundStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import java.util.Optional;
|
||||
|
||||
public final class BoundOptionalOperation<E>
|
||||
extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
|
||||
public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
|
||||
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractOptionalOperation<E, ?> delegate;
|
||||
|
||||
public BoundOptionalOperation(
|
||||
BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
|
||||
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,21 +15,21 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.datastax.driver.core.BoundStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class BoundStreamOperation<E>
|
||||
extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
|
||||
public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
|
||||
|
||||
private final BoundStatement boundStatement;
|
||||
private final AbstractStreamOperation<E, ?> delegate;
|
||||
|
||||
public BoundStreamOperation(
|
||||
BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
|
||||
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
|
||||
super(operation.sessionOps);
|
||||
this.boundStatement = boundStatement;
|
||||
this.delegate = operation;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -21,6 +20,7 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
|
|||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.datastax.driver.core.querybuilder.Select;
|
||||
import com.datastax.driver.core.querybuilder.Select.Where;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Filter;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
|
@ -38,7 +38,6 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
|
|||
public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||
super(sessionOperations);
|
||||
this.entity = entity;
|
||||
//TODO(gburd): cache SELECT COUNT results within the scope of a UOW
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -75,11 +74,8 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
|
|||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can count columns only in single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
throw new HelenusMappingException("you can count columns only in single entity "
|
||||
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,20 +15,26 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.datastax.driver.core.querybuilder.Delete;
|
||||
import com.datastax.driver.core.querybuilder.Delete.Where;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Filter;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.support.HelenusException;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
||||
public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {
|
||||
|
@ -123,20 +128,44 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
|
|||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can delete rows only in single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
throw new HelenusMappingException("you can delete rows only in single entity "
|
||||
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
public List<Facet> bindFacetValues() {
|
||||
return bindFacetValues(getFacets());
|
||||
public List<Facet> bindFacetValues(List<Facet> facets) {
|
||||
if (facets == null) {
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size());
|
||||
filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f));
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
if (filters != null) {
|
||||
for (HelenusProperty prop : unboundFacet.getProperties()) {
|
||||
|
||||
Filter filter = filterMap.get(prop);
|
||||
if (filter != null) {
|
||||
Object[] postulates = filter.postulateValues();
|
||||
for (Object p : postulates) {
|
||||
binder.setValueForProperty(prop, p.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean isIdempotentOperation() {
|
||||
return true;
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
} else {
|
||||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
return boundFacets;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -148,28 +177,20 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
return entity.getFacets();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResultSet sync(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
return sync();
|
||||
}
|
||||
ResultSet result = super.sync(uow);
|
||||
uow.cacheEvict(bindFacetValues());
|
||||
List<Facet> facets = getFacets();
|
||||
uow.cacheEvict(bindFacetValues(facets));
|
||||
return result;
|
||||
}
|
||||
|
||||
public ResultSet batch(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
|
||||
}
|
||||
|
||||
uow.cacheEvict(bindFacetValues());
|
||||
uow.batch(this);
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
return entity.getFacets();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,24 +15,23 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.datastax.driver.core.querybuilder.Insert;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.Getter;
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
|
@ -44,63 +42,37 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
|
||||
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
|
||||
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
|
||||
private final T pojo;
|
||||
private final Class<?> resultType;
|
||||
private final Set<String> readSet;
|
||||
private HelenusEntity entity;
|
||||
private boolean ifNotExists;
|
||||
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
private long writeTime = 0L;
|
||||
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.pojo = null;
|
||||
this.readSet = null;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = ResultSet.class;
|
||||
}
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
Class<?> resultType,
|
||||
boolean ifNotExists) {
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.pojo = null;
|
||||
this.readSet = null;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.pojo = null;
|
||||
this.resultType = resultType;
|
||||
}
|
||||
|
||||
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
|
||||
Set<String> mutations, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.entity = entity;
|
||||
}
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.pojo = null;
|
||||
this.readSet = null;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.resultType = resultType;
|
||||
}
|
||||
|
||||
public InsertOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
T pojo,
|
||||
Set<String> mutations,
|
||||
Set<String> read,
|
||||
boolean ifNotExists) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.pojo = pojo;
|
||||
this.readSet = read;
|
||||
this.entity = entity;
|
||||
this.ifNotExists = ifNotExists;
|
||||
this.resultType = entity.getMappingInterface();
|
||||
|
||||
|
@ -111,11 +83,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
boolean addProp = false;
|
||||
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
addProp = true;
|
||||
break;
|
||||
default:
|
||||
default :
|
||||
addProp = (keys == null || keys.contains(prop.getPropertyName()));
|
||||
}
|
||||
|
||||
|
@ -159,34 +131,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
|
||||
@Override
|
||||
public BuiltStatement buildStatement(boolean cached) {
|
||||
List<HelenusEntity> entities =
|
||||
values
|
||||
.stream()
|
||||
.map(t -> t._1.getProperty().getEntity())
|
||||
.distinct()
|
||||
.collect(Collectors.toList());
|
||||
if (entities.size() != 1) {
|
||||
throw new HelenusMappingException(
|
||||
"you can insert only single entity at a time, found: "
|
||||
+ entities
|
||||
.stream()
|
||||
.map(e -> e.getMappingInterface().toString())
|
||||
.collect(Collectors.joining(", ")));
|
||||
}
|
||||
HelenusEntity entity = entities.get(0);
|
||||
if (this.entity != null) {
|
||||
if (this.entity != entity) {
|
||||
throw new HelenusMappingException(
|
||||
"you can insert only single entity at a time, found: "
|
||||
+ this.entity.getMappingInterface().toString()
|
||||
+ ", "
|
||||
+ entity.getMappingInterface().toString());
|
||||
}
|
||||
} else {
|
||||
this.entity = entity;
|
||||
}
|
||||
|
||||
if (values.isEmpty()) return null;
|
||||
values.forEach(t -> addPropertyNode(t._1));
|
||||
|
||||
if (values.isEmpty())
|
||||
return null;
|
||||
|
||||
if (entity == null) {
|
||||
throw new HelenusMappingException("unknown entity");
|
||||
|
@ -198,13 +147,10 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
insert.ifNotExists();
|
||||
}
|
||||
|
||||
values.forEach(
|
||||
t -> {
|
||||
values.forEach(t -> {
|
||||
insert.value(t._1.getColumnName(), t._2);
|
||||
});
|
||||
|
||||
//TODO(gburd): IF NOT EXISTS when @Constraints.Relationship is 1:1 or 1:m
|
||||
|
||||
if (this.ttl != null) {
|
||||
insert.using(QueryBuilder.ttl(this.ttl[0]));
|
||||
}
|
||||
|
@ -215,9 +161,12 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
return insert;
|
||||
}
|
||||
|
||||
private T newInstance(Class<?> iface) {
|
||||
@Override
|
||||
public T transform(ResultSet resultSet) {
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
if (values.size() > 0) {
|
||||
boolean immutable = entity.isDraftable();
|
||||
boolean immutable = iface.isAssignableFrom(Drafted.class);
|
||||
Collection<HelenusProperty> properties = entity.getOrderedProperties();
|
||||
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
|
||||
|
||||
|
@ -230,8 +179,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
if (backingMap.containsKey(key)) {
|
||||
// Some values man need to be converted (e.g. from String to Enum). This is done
|
||||
// within the BeanColumnValueProvider below.
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getReadConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop
|
||||
.getReadConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
backingMap.put(key, converter.get().apply(backingMap.get(key)));
|
||||
}
|
||||
|
@ -239,8 +188,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
// If we started this operation with an instance of this type, use values from
|
||||
// that.
|
||||
if (pojo != null) {
|
||||
backingMap.put(
|
||||
key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
|
||||
backingMap.put(key,
|
||||
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
|
||||
} else {
|
||||
// Otherwise we'll use default values for the property type if available.
|
||||
Class<?> propType = prop.getJavaType();
|
||||
|
@ -258,24 +207,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
// Lastly, create a new proxy object for the entity and return the new instance.
|
||||
return (T) Helenus.map(iface, backingMap);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T transform(ResultSet resultSet) {
|
||||
if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
|
||||
throw new HelenusException("Statement was not applied due to consistency constraints");
|
||||
}
|
||||
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
T o = newInstance(iface);
|
||||
if (o == null) {
|
||||
// Oddly, this insert didn't change anything so simply return the pojo.
|
||||
// Oddly, this insert didn't change any value so simply return the pojo.
|
||||
// TODO(gburd): this pojo is the result of a Draft.build() call which will not
|
||||
// preserve object identity (o1 == o2), ... fix me.
|
||||
return (T) pojo;
|
||||
}
|
||||
return o;
|
||||
}
|
||||
return (T) resultSet;
|
||||
}
|
||||
|
||||
|
@ -291,48 +227,20 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
return this;
|
||||
}
|
||||
|
||||
protected void adjustTtlAndWriteTime(MapExportable pojo) {
|
||||
if (ttl != null || writeTime != 0L) {
|
||||
List<String> columnNames =
|
||||
values
|
||||
.stream()
|
||||
.map(t -> t._1.getProperty())
|
||||
.filter(
|
||||
prop -> {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
return false;
|
||||
default:
|
||||
return true;
|
||||
private void addPropertyNode(HelenusPropertyNode p) {
|
||||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
|
||||
+ " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
})
|
||||
.map(prop -> prop.getColumnName().toCql(false))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (columnNames.size() > 0) {
|
||||
if (ttl != null) {
|
||||
columnNames.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
|
||||
}
|
||||
if (writeTime != 0L) {
|
||||
columnNames.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isIdempotentOperation() {
|
||||
return values.stream().map(v -> v._1.getProperty()).allMatch(prop -> prop.isIdempotent())
|
||||
|| super.isIdempotentOperation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public T sync() throws TimeoutException {
|
||||
T result = super.sync();
|
||||
if (entity.isCacheable() && result != null) {
|
||||
adjustTtlAndWriteTime((MapExportable) result);
|
||||
sessionOps.updateCache(result, bindFacetValues());
|
||||
sessionOps.updateCache(result, entity.getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -343,76 +251,13 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
return sync();
|
||||
}
|
||||
T result = super.sync(uow);
|
||||
if (result != null && pojo != null && !(pojo == result) && pojo.equals(result)) {
|
||||
// To preserve object identity we need to find this object in cache
|
||||
// because it was unchanged by the INSERT but pojo in this case was
|
||||
// the result of a draft.build().
|
||||
T cachedValue = (T) uow.cacheLookup(bindFacetValues());
|
||||
if (cachedValue != null) {
|
||||
result = cachedValue;
|
||||
}
|
||||
}
|
||||
Class<?> iface = entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
|
||||
adjustTtlAndWriteTime((MapExportable) result);
|
||||
}
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
cacheUpdate(uow, result, entity.getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public T batch(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
|
||||
}
|
||||
|
||||
if (this.entity != null) {
|
||||
Class<?> iface = this.entity.getMappingInterface();
|
||||
if (resultType == iface) {
|
||||
final T result = (pojo == null) ? newInstance(iface) : pojo;
|
||||
if (result != null) {
|
||||
adjustTtlAndWriteTime((MapExportable) result);
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
}
|
||||
uow.batch(this);
|
||||
return (T) result;
|
||||
}
|
||||
}
|
||||
|
||||
return sync(uow);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
List<Facet> facets = getFacets();
|
||||
if (facets == null || facets.size() == 0) {
|
||||
return new ArrayList<Facet>();
|
||||
}
|
||||
List<Facet> boundFacets = new ArrayList<>();
|
||||
Map<HelenusProperty, Object> valuesMap = new HashMap<>(values.size());
|
||||
values.forEach(t -> valuesMap.put(t._1.getProperty(), t._2));
|
||||
|
||||
for (Facet facet : facets) {
|
||||
if (facet instanceof UnboundFacet) {
|
||||
UnboundFacet unboundFacet = (UnboundFacet) facet;
|
||||
UnboundFacet.Binder binder = unboundFacet.binder();
|
||||
for (HelenusProperty prop : unboundFacet.getProperties()) {
|
||||
Object value = valuesMap.get(prop);
|
||||
if (value != null) {
|
||||
binder.setValueForProperty(prop, value.toString());
|
||||
}
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
}
|
||||
} else {
|
||||
boundFacets.add(facet);
|
||||
}
|
||||
}
|
||||
return boundFacets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
if (entity != null) {
|
||||
|
@ -421,4 +266,5 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
|
|||
return new ArrayList<Facet>();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,33 +15,36 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.codahale.metrics.Meter;
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.*;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.google.common.base.Stopwatch;
|
||||
import java.net.InetAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.codahale.metrics.Meter;
|
||||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.datastax.driver.core.RegularStatement;
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.ResultSetFuture;
|
||||
import com.datastax.driver.core.Statement;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.google.common.base.Stopwatch;
|
||||
|
||||
import brave.Span;
|
||||
import brave.Tracer;
|
||||
import brave.propagation.TraceContext;
|
||||
import net.helenus.core.AbstractSessionOperations;
|
||||
import net.helenus.core.UnitOfWork;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.support.HelenusException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public abstract class Operation<E> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(Operation.class);
|
||||
|
||||
protected final AbstractSessionOperations sessionOps;
|
||||
protected boolean showValues;
|
||||
protected long queryExecutionTimeout = 10;
|
||||
protected TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
|
||||
protected final Meter uowCacheHits;
|
||||
protected final Meter uowCacheMiss;
|
||||
protected final Meter sessionCacheHits;
|
||||
|
@ -53,7 +55,6 @@ public abstract class Operation<E> {
|
|||
|
||||
Operation(AbstractSessionOperations sessionOperations) {
|
||||
this.sessionOps = sessionOperations;
|
||||
this.showValues = sessionOps.showValues();
|
||||
MetricRegistry metrics = sessionOperations.getMetricRegistry();
|
||||
if (metrics == null) {
|
||||
metrics = new MetricRegistry();
|
||||
|
@ -67,10 +68,6 @@ public abstract class Operation<E> {
|
|||
this.requestLatency = metrics.timer("net.helenus.request-latency");
|
||||
}
|
||||
|
||||
public static String queryString(BatchOperation operation, boolean includeValues) {
|
||||
return operation.toString(includeValues);
|
||||
}
|
||||
|
||||
public static String queryString(Statement statement, boolean includeValues) {
|
||||
String query = null;
|
||||
if (statement instanceof BuiltStatement) {
|
||||
|
@ -86,87 +83,51 @@ public abstract class Operation<E> {
|
|||
query = regularStatement.getQueryString();
|
||||
} else {
|
||||
query = statement.toString();
|
||||
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
public ResultSet execute(
|
||||
AbstractSessionOperations session,
|
||||
UnitOfWork uow,
|
||||
long timeout,
|
||||
TimeUnit units,
|
||||
boolean showValues,
|
||||
boolean cached)
|
||||
throws TimeoutException {
|
||||
public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
|
||||
TimeUnit units, boolean showValues, boolean cached) throws TimeoutException {
|
||||
|
||||
// Start recording in a Zipkin sub-span our execution time to perform this
|
||||
// operation.
|
||||
Tracer tracer = session.getZipkinTracer();
|
||||
Span span = null;
|
||||
if (tracer != null && traceContext != null) {
|
||||
span = tracer.newChild(traceContext);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
if (span != null) {
|
||||
span.name("cassandra");
|
||||
span.start();
|
||||
}
|
||||
|
||||
Statement statement = options(buildStatement(cached));
|
||||
|
||||
if (session.isShowCql()) {
|
||||
String stmt =
|
||||
(this instanceof BatchOperation)
|
||||
? queryString((BatchOperation) this, showValues)
|
||||
: queryString(statement, showValues);
|
||||
session.getPrintStream().println(stmt);
|
||||
} else if (LOG.isDebugEnabled()) {
|
||||
String stmt =
|
||||
(this instanceof BatchOperation)
|
||||
? queryString((BatchOperation) this, showValues)
|
||||
: queryString(statement, showValues);
|
||||
LOG.info("CQL> " + stmt);
|
||||
}
|
||||
|
||||
Stopwatch timer = Stopwatch.createStarted();
|
||||
try {
|
||||
ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer);
|
||||
if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1);
|
||||
ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues);
|
||||
if (uow != null)
|
||||
uow.recordCacheAndDatabaseOperationCount(0, 1);
|
||||
ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
|
||||
ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
|
||||
if (LOG.isDebugEnabled()) {
|
||||
ExecutionInfo ei = resultSet.getExecutionInfo();
|
||||
Host qh = ei.getQueriedHost();
|
||||
String oh =
|
||||
ei.getTriedHosts()
|
||||
.stream()
|
||||
.map(Host::getAddress)
|
||||
.map(InetAddress::toString)
|
||||
.collect(Collectors.joining(", "));
|
||||
ConsistencyLevel cl = ei.getAchievedConsistencyLevel();
|
||||
if (cl == null) {
|
||||
cl = statement.getConsistencyLevel();
|
||||
}
|
||||
int se = ei.getSpeculativeExecutions();
|
||||
String warn = ei.getWarnings().stream().collect(Collectors.joining(", "));
|
||||
String ri =
|
||||
String.format(
|
||||
"%s %s ~%s %s %s%s%sspec-retries: %d",
|
||||
"server v" + qh.getCassandraVersion(),
|
||||
qh.getAddress().toString(),
|
||||
(oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "",
|
||||
qh.getDatacenter(),
|
||||
qh.getRack(),
|
||||
(cl != null)
|
||||
? (" consistency: "
|
||||
+ cl.name()
|
||||
+ " "
|
||||
+ (cl.isDCLocal() ? " DC " : "")
|
||||
+ (cl.isSerial() ? " SC " : ""))
|
||||
: "",
|
||||
(warn != null && !warn.equals("")) ? ": " + warn : "",
|
||||
se);
|
||||
if (uow != null) uow.setInfo(ri);
|
||||
else LOG.debug(ri);
|
||||
}
|
||||
if (!resultSet.wasApplied()
|
||||
&& !(columnDefinitions.size() > 1 || !columnDefinitions.contains("[applied]"))) {
|
||||
throw new HelenusException("Operation Failed");
|
||||
}
|
||||
return resultSet;
|
||||
|
||||
} finally {
|
||||
timer.stop();
|
||||
if (uow != null) uow.addDatabaseTime("Cassandra", timer);
|
||||
if (uow != null)
|
||||
uow.addDatabaseTime("Cassandra", timer);
|
||||
log(statement, uow, timer, showValues);
|
||||
}
|
||||
|
||||
} finally {
|
||||
|
||||
if (span != null) {
|
||||
span.finish();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
|
||||
|
@ -179,16 +140,10 @@ public abstract class Operation<E> {
|
|||
if (timer != null) {
|
||||
timerString = String.format(" %s ", timer.toString());
|
||||
}
|
||||
LOG.info(
|
||||
String.format(
|
||||
"%s%s%s", uowString, timerString, Operation.queryString(statement, showValues)));
|
||||
LOG.info(String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false)));
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean isIdempotentOperation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Statement options(Statement statement) {
|
||||
return statement;
|
||||
}
|
||||
|
@ -208,4 +163,5 @@ public abstract class Operation<E> {
|
|||
public boolean isSessionCacheable() {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -44,4 +43,5 @@ public final class PreparedOperation<E> {
|
|||
public String toString() {
|
||||
return preparedStatement.getQueryString();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -24,8 +23,7 @@ public final class PreparedOptionalOperation<E> {
|
|||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractOptionalOperation<E, ?> operation;
|
||||
|
||||
public PreparedOptionalOperation(
|
||||
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
|
||||
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -24,8 +23,7 @@ public final class PreparedStreamOperation<E> {
|
|||
private final PreparedStatement preparedStatement;
|
||||
private final AbstractStreamOperation<E, ?> operation;
|
||||
|
||||
public PreparedStreamOperation(
|
||||
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
|
||||
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
|
||||
this.preparedStatement = statement;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,16 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class SelectFirstOperation<E>
|
||||
extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
|
||||
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
|
||||
|
@ -64,9 +64,4 @@ public final class SelectFirstOperation<E>
|
|||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean ignoreCache() {
|
||||
return delegate.ignoreCache();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,18 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class SelectFirstTransformingOperation<R, E>
|
||||
extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
|
||||
extends
|
||||
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
|
@ -57,9 +59,4 @@ public final class SelectFirstTransformingOperation<R, E>
|
|||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean ignoreCache() {
|
||||
return delegate.ignoreCache();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,6 +15,14 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.Row;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
@ -24,15 +31,11 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
|
|||
import com.datastax.driver.core.querybuilder.Select;
|
||||
import com.datastax.driver.core.querybuilder.Select.Selection;
|
||||
import com.datastax.driver.core.querybuilder.Select.Where;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
import java.util.stream.StreamSupport;
|
||||
import com.google.common.collect.Iterables;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.cache.UnboundFacet;
|
||||
import net.helenus.core.reflect.Entity;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
@ -42,8 +45,6 @@ import net.helenus.mapping.value.ColumnValueProvider;
|
|||
import net.helenus.mapping.value.ValueProviderMap;
|
||||
import net.helenus.support.Fun;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
|
||||
|
||||
|
@ -54,17 +55,14 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
protected List<Ordering> ordering = null;
|
||||
protected Integer limit = null;
|
||||
protected boolean allowFiltering = false;
|
||||
|
||||
protected String alternateTableName = null;
|
||||
protected boolean isCacheable = false;
|
||||
protected boolean implementsEntityType = false;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
|
||||
this.rowMapper =
|
||||
new Function<Row, E>() {
|
||||
this.rowMapper = new Function<Row, E>() {
|
||||
|
||||
@Override
|
||||
public E apply(Row source) {
|
||||
|
@ -87,47 +85,31 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
|
||||
super(sessionOperations);
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
|
||||
isCacheable = entity.isCacheable();
|
||||
}
|
||||
|
||||
public SelectOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
HelenusEntity entity,
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
|
||||
Function<Row, E> rowMapper) {
|
||||
|
||||
super(sessionOperations);
|
||||
this.rowMapper = rowMapper;
|
||||
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
|
||||
isCacheable = entity.isCacheable();
|
||||
}
|
||||
|
||||
public SelectOperation(
|
||||
AbstractSessionOperations sessionOperations,
|
||||
Function<Row, E> rowMapper,
|
||||
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
|
||||
HelenusPropertyNode... props) {
|
||||
|
||||
super(sessionOperations);
|
||||
|
||||
this.rowMapper = rowMapper;
|
||||
Collections.addAll(this.props, props);
|
||||
|
||||
HelenusEntity entity = props[0].getEntity();
|
||||
this.isCacheable = entity.isCacheable();
|
||||
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
|
||||
}
|
||||
|
||||
public CountOperation count() {
|
||||
|
@ -138,11 +120,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can count records only from a single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ prop.getEntity().getMappingInterface());
|
||||
throw new HelenusMappingException("you can count records only from a single entity "
|
||||
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -154,10 +133,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
HelenusEntity entity = Helenus.entity(materializedViewClass);
|
||||
this.alternateTableName = entity.getName().toCql();
|
||||
this.props.clear();
|
||||
entity
|
||||
.getOrderedProperties()
|
||||
.stream()
|
||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||
.forEach(p -> this.props.add(p));
|
||||
return this;
|
||||
}
|
||||
|
@ -175,9 +151,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
|
||||
this.rowMapper = null;
|
||||
|
||||
return new SelectTransformingOperation<R, E>(
|
||||
this,
|
||||
(r) -> {
|
||||
return new SelectTransformingOperation<R, E>(this, (r) -> {
|
||||
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
|
||||
return (R) Helenus.map(entityClass, map);
|
||||
});
|
||||
|
@ -243,6 +217,7 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (binder.isBound()) {
|
||||
boundFacets.add(binder.bind());
|
||||
|
@ -264,27 +239,32 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
String columnName = prop.getColumnName();
|
||||
selection = selection.column(columnName);
|
||||
|
||||
if (prop.getProperty().caseSensitiveIndex()) {
|
||||
allowFiltering = true;
|
||||
}
|
||||
|
||||
if (entity == null) {
|
||||
entity = prop.getEntity();
|
||||
} else if (entity != prop.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can select columns only from a single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ prop.getEntity().getMappingInterface());
|
||||
throw new HelenusMappingException("you can select columns only from a single entity "
|
||||
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
|
||||
}
|
||||
|
||||
if (cached && implementsEntityType) {
|
||||
// TODO(gburd): writeTime and ttl will be useful on merge() but cause object
|
||||
// identity to fail.
|
||||
if (false && cached) {
|
||||
switch (prop.getProperty().getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
case CLUSTERING_COLUMN:
|
||||
case PARTITION_KEY :
|
||||
case CLUSTERING_COLUMN :
|
||||
break;
|
||||
default:
|
||||
default :
|
||||
if (entity.equals(prop.getEntity())) {
|
||||
if (prop.getNext().isPresent()) {
|
||||
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
|
||||
}
|
||||
if (!prop.getProperty().getDataType().isCollectionType()) {
|
||||
columnName = prop.getProperty().getColumnName().toCql(false);
|
||||
selection.ttl(columnName).as('"' + CacheUtil.ttlKey(columnName) + '"');
|
||||
selection.writeTime(columnName).as('"' + CacheUtil.writeTimeKey(columnName) + '"');
|
||||
selection.writeTime(columnName).as(columnName + "_writeTime");
|
||||
selection.ttl(columnName).as(columnName + "_ttl");
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
@ -311,35 +291,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
|
||||
Where where = select.where();
|
||||
|
||||
boolean isFirstIndex = true;
|
||||
for (Filter<?> filter : filters.values()) {
|
||||
where.and(filter.getClause(sessionOps.getValuePreparer()));
|
||||
HelenusProperty filterProp = filter.getNode().getProperty();
|
||||
HelenusProperty prop =
|
||||
props
|
||||
.stream()
|
||||
.map(HelenusPropertyNode::getProperty)
|
||||
.filter(thisProp -> thisProp.getPropertyName().equals(filterProp.getPropertyName()))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
if (allowFiltering == false && prop != null) {
|
||||
switch (prop.getColumnType()) {
|
||||
case PARTITION_KEY:
|
||||
break;
|
||||
case CLUSTERING_COLUMN:
|
||||
default:
|
||||
// When using non-Cassandra-standard 2i types or when using more than one
|
||||
// indexed column or non-indexed columns the query must include ALLOW FILTERING.
|
||||
if (prop.caseSensitiveIndex() == false) {
|
||||
allowFiltering = true;
|
||||
} else if (prop.getIndexName() != null) {
|
||||
allowFiltering |= !isFirstIndex;
|
||||
isFirstIndex = false;
|
||||
} else {
|
||||
allowFiltering = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -358,14 +311,12 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
|||
@Override
|
||||
public Stream<E> transform(ResultSet resultSet) {
|
||||
if (rowMapper != null) {
|
||||
return StreamSupport.stream(
|
||||
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
|
||||
return StreamSupport
|
||||
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
|
||||
.map(rowMapper);
|
||||
} else {
|
||||
return (Stream<E>)
|
||||
StreamSupport.stream(
|
||||
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
|
||||
false);
|
||||
return (Stream<E>) StreamSupport
|
||||
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,15 +15,18 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
|
||||
import net.helenus.core.cache.Facet;
|
||||
|
||||
public final class SelectTransformingOperation<R, E>
|
||||
extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
|
||||
extends
|
||||
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
|
||||
|
||||
private final SelectOperation<E> delegate;
|
||||
private final Function<E, R> fn;
|
||||
|
@ -57,14 +59,4 @@ public final class SelectTransformingOperation<R, E>
|
|||
public Stream<R> transform(ResultSet resultSet) {
|
||||
return delegate.transform(resultSet).map(fn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSessionCacheable() {
|
||||
return delegate.isSessionCacheable();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean ignoreCache() {
|
||||
return delegate.ignoreCache();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,83 +15,53 @@
|
|||
*/
|
||||
package net.helenus.core.operation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.datastax.driver.core.ResultSet;
|
||||
import com.datastax.driver.core.querybuilder.Assignment;
|
||||
import com.datastax.driver.core.querybuilder.BuiltStatement;
|
||||
import com.datastax.driver.core.querybuilder.QueryBuilder;
|
||||
import com.datastax.driver.core.querybuilder.Update;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.core.*;
|
||||
import net.helenus.core.cache.BoundFacet;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.core.cache.Facet;
|
||||
import net.helenus.core.reflect.HelenusPropertyNode;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.value.BeanColumnValueProvider;
|
||||
import net.helenus.support.HelenusException;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
import net.helenus.support.Immutables;
|
||||
|
||||
public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {
|
||||
|
||||
private final Map<Assignment, BoundFacet> assignments = new HashMap<>();
|
||||
private final List<Assignment> assignments = new ArrayList<Assignment>();
|
||||
private final AbstractEntityDraft<E> draft;
|
||||
private final Map<String, Object> draftMap;
|
||||
private final Set<String> readSet;
|
||||
private HelenusEntity entity = null;
|
||||
private Object pojo;
|
||||
private int[] ttl;
|
||||
private long[] timestamp;
|
||||
private long writeTime = 0L;
|
||||
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
this.readSet = null;
|
||||
}
|
||||
|
||||
public UpdateOperation(
|
||||
AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
|
||||
super(sessionOperations);
|
||||
this.draft = draft;
|
||||
this.draftMap = draft.toMap();
|
||||
this.readSet = draft.read();
|
||||
}
|
||||
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, Object pojo) {
|
||||
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
|
||||
if (pojo != null) {
|
||||
this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
|
||||
if (this.entity != null && entity.isCacheable() && pojo instanceof MapExportable) {
|
||||
this.pojo = pojo;
|
||||
this.readSet = ((MapExportable) pojo).toReadSet();
|
||||
} else {
|
||||
this.readSet = null;
|
||||
}
|
||||
} else {
|
||||
this.readSet = null;
|
||||
}
|
||||
}
|
||||
|
||||
public UpdateOperation(
|
||||
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
|
||||
super(sessionOperations);
|
||||
this.draft = null;
|
||||
this.draftMap = null;
|
||||
this.readSet = null;
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(p.getProperty(), v));
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
|
||||
addPropertyNode(p);
|
||||
}
|
||||
|
@ -101,24 +70,9 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
Objects.requireNonNull(getter, "getter is empty");
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, prop);
|
||||
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(prop, value));
|
||||
|
||||
if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
if (draft.get(key, value.getClass()) != v) {
|
||||
draft.set(key, v);
|
||||
}
|
||||
}
|
||||
|
||||
if (pojo != null) {
|
||||
if (!BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop).equals(v)) {
|
||||
String key = prop.getPropertyName();
|
||||
((MapExportable) pojo).put(key, v);
|
||||
}
|
||||
}
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
|
@ -143,21 +97,15 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
BoundFacet facet = null;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value + delta);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet);
|
||||
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -171,21 +119,15 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
BoundFacet facet = null;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value - delta);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet);
|
||||
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -204,27 +146,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -236,27 +167,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -268,16 +188,13 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null || draft != null) {
|
||||
final List<V> list;
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
} else {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
}
|
||||
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
if (idx < 0) {
|
||||
list.add(0, value);
|
||||
} else if (idx > list.size()) {
|
||||
|
@ -286,15 +203,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
list.add(idx, value);
|
||||
}
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -306,26 +216,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -337,26 +237,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.addAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -368,26 +258,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.remove(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.remove(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -399,26 +279,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.removeAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.removeAll(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -427,8 +297,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
Object valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
|
||||
valueObj = convertedList.get(0);
|
||||
|
@ -443,8 +312,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
List valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (List) converter.get().apply(value);
|
||||
}
|
||||
|
@ -468,26 +336,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.add(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.add(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.add(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -499,26 +357,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.addAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.addAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.addAll(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -530,26 +378,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Object valueObj = prepareSingleSetValue(p, value);
|
||||
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.remove(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.remove(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.remove(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -561,26 +399,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
|
||||
Set valueObj = prepareSetValue(p, value);
|
||||
|
||||
final Set<V> set;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
set.removeAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
set = (Set<V>) draftMap.get(key);
|
||||
set.removeAll(value);
|
||||
facet = new BoundFacet(prop, set);
|
||||
} else {
|
||||
set = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet);
|
||||
assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
Set<V> set = (Set<V>) draftMap.get(key);
|
||||
set.removeAll(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -589,8 +417,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusProperty prop = p.getProperty();
|
||||
Object valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
|
||||
valueObj = convertedSet.iterator().next();
|
||||
|
@ -604,8 +431,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusProperty prop = p.getProperty();
|
||||
Set valueObj = value;
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
valueObj = (Set) converter.get().apply(value);
|
||||
}
|
||||
|
@ -629,35 +455,23 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
final Map<K, V> map;
|
||||
final BoundFacet facet;
|
||||
if (pojo != null) {
|
||||
map = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
map.put(key, value);
|
||||
facet = new BoundFacet(prop, map);
|
||||
} else if (draft != null) {
|
||||
map = (Map<K, V>) draftMap.get(prop.getPropertyName());
|
||||
map.put(key, value);
|
||||
facet = new BoundFacet(prop, map);
|
||||
} else {
|
||||
map = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Map<Object, Object> convertedMap =
|
||||
(Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
|
||||
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
|
||||
.apply(Immutables.mapOf(key, value));
|
||||
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
|
||||
assignments.put(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()), facet);
|
||||
assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
|
||||
}
|
||||
} else {
|
||||
assignments.put(QueryBuilder.put(p.getColumnName(), key, value), facet);
|
||||
assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -669,32 +483,20 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
final Map<K, V> newMap;
|
||||
final BoundFacet facet;
|
||||
if (pojo != null) {
|
||||
newMap = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
newMap.putAll(map);
|
||||
facet = new BoundFacet(prop, newMap);
|
||||
} else if (draft != null) {
|
||||
newMap = (Map<K, V>) draftMap.get(prop.getPropertyName());
|
||||
newMap.putAll(map);
|
||||
facet = new BoundFacet(prop, newMap);
|
||||
} else {
|
||||
newMap = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
Optional<Function<Object, Object>> converter =
|
||||
prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
|
||||
if (converter.isPresent()) {
|
||||
Map convertedMap = (Map) converter.get().apply(map);
|
||||
assignments.put(QueryBuilder.putAll(p.getColumnName(), convertedMap), facet);
|
||||
assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
|
||||
} else {
|
||||
assignments.put(QueryBuilder.putAll(p.getColumnName(), map), facet);
|
||||
assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -707,7 +509,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
Update update = QueryBuilder.update(entity.getName().toCql());
|
||||
|
||||
for (Assignment assignment : assignments.keySet()) {
|
||||
for (Assignment assignment : assignments) {
|
||||
update.with(assignment);
|
||||
}
|
||||
|
||||
|
@ -738,10 +540,6 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
@Override
|
||||
public E transform(ResultSet resultSet) {
|
||||
if ((ifFilters != null && !ifFilters.isEmpty()) && (resultSet.wasApplied() == false)) {
|
||||
throw new HelenusException("Statement was not applied due to consistency constraints");
|
||||
}
|
||||
|
||||
if (draft != null) {
|
||||
return Helenus.map(draft.getEntityClass(), draft.toMap(draftMap));
|
||||
} else {
|
||||
|
@ -765,70 +563,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
if (entity == null) {
|
||||
entity = p.getEntity();
|
||||
} else if (entity != p.getEntity()) {
|
||||
throw new HelenusMappingException(
|
||||
"you can update columns only in single entity "
|
||||
+ entity.getMappingInterface()
|
||||
+ " or "
|
||||
+ p.getEntity().getMappingInterface());
|
||||
throw new HelenusMappingException("you can update columns only in single entity "
|
||||
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
|
||||
}
|
||||
}
|
||||
|
||||
private void adjustTtlAndWriteTime(MapExportable pojo) {
|
||||
if (ttl != null || writeTime != 0L) {
|
||||
List<String> names = new ArrayList<String>(assignments.size());
|
||||
for (BoundFacet facet : assignments.values()) {
|
||||
for (HelenusProperty prop : facet.getProperties()) {
|
||||
names.add(prop.getColumnName().toCql(false));
|
||||
}
|
||||
}
|
||||
|
||||
if (names.size() > 0) {
|
||||
if (ttl != null) {
|
||||
names.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
|
||||
}
|
||||
if (writeTime != 0L) {
|
||||
names.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isIdempotentOperation() {
|
||||
return assignments
|
||||
.values()
|
||||
.stream()
|
||||
.allMatch(
|
||||
facet -> {
|
||||
if (facet != null) {
|
||||
Set<HelenusProperty> props = facet.getProperties();
|
||||
if (props != null && props.size() > 0) {
|
||||
return props.stream().allMatch(prop -> prop.isIdempotent());
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
// In this case our UPDATE statement made mutations via the List, Set, Map methods only.
|
||||
return false;
|
||||
}
|
||||
})
|
||||
|| super.isIdempotentOperation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public E sync() throws TimeoutException {
|
||||
E result = super.sync();
|
||||
if (result != null && entity.isCacheable()) {
|
||||
if (draft != null) {
|
||||
adjustTtlAndWriteTime(draft);
|
||||
adjustTtlAndWriteTime((MapExportable) result);
|
||||
sessionOps.updateCache(result, bindFacetValues());
|
||||
} else if (pojo != null) {
|
||||
adjustTtlAndWriteTime((MapExportable) pojo);
|
||||
sessionOps.updateCache(pojo, bindFacetValues());
|
||||
} else {
|
||||
sessionOps.cacheEvict(bindFacetValues());
|
||||
}
|
||||
if (entity.isCacheable() && draft != null) {
|
||||
sessionOps.updateCache(result, getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -839,60 +583,12 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
return sync();
|
||||
}
|
||||
E result = super.sync(uow);
|
||||
if (result != null) {
|
||||
if (draft != null) {
|
||||
adjustTtlAndWriteTime(draft);
|
||||
}
|
||||
if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
|
||||
adjustTtlAndWriteTime((MapExportable) result);
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
} else if (pojo != null) {
|
||||
adjustTtlAndWriteTime((MapExportable) pojo);
|
||||
cacheUpdate(uow, (E) pojo, bindFacetValues());
|
||||
return (E) pojo;
|
||||
}
|
||||
cacheUpdate(uow, result, getFacets());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public E batch(UnitOfWork uow) throws TimeoutException {
|
||||
if (uow == null) {
|
||||
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
|
||||
}
|
||||
|
||||
final E result;
|
||||
if (draft != null) {
|
||||
result = draft.build();
|
||||
adjustTtlAndWriteTime(draft);
|
||||
} else if (pojo != null) {
|
||||
result = (E) pojo;
|
||||
adjustTtlAndWriteTime((MapExportable) pojo);
|
||||
} else {
|
||||
result = null;
|
||||
}
|
||||
|
||||
if (result != null) {
|
||||
cacheUpdate(uow, result, bindFacetValues());
|
||||
uow.batch(this);
|
||||
return result;
|
||||
}
|
||||
|
||||
return sync(uow);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> bindFacetValues() {
|
||||
List<Facet> facets = bindFacetValues(entity.getFacets());
|
||||
facets.addAll(
|
||||
assignments
|
||||
.values()
|
||||
.stream()
|
||||
.distinct()
|
||||
.filter(o -> o != null)
|
||||
.collect(Collectors.toList()));
|
||||
return facets;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Facet> getFacets() {
|
||||
if (entity != null) {
|
||||
|
@ -901,4 +597,5 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
return new ArrayList<Facet>();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,17 +19,10 @@ import java.util.HashMap;
|
|||
import java.util.Map;
|
||||
|
||||
public enum DefaultPrimitiveTypes {
|
||||
BOOLEAN(boolean.class, false),
|
||||
BYTE(byte.class, (byte) 0x0),
|
||||
CHAR(char.class, (char) 0x0),
|
||||
SHORT(short.class, (short) 0),
|
||||
INT(int.class, 0),
|
||||
LONG(long.class, 0L),
|
||||
FLOAT(float.class, 0.0f),
|
||||
DOUBLE(double.class, 0.0);
|
||||
BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class,
|
||||
(short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);
|
||||
|
||||
private static final Map<Class<?>, DefaultPrimitiveTypes> map =
|
||||
new HashMap<Class<?>, DefaultPrimitiveTypes>();
|
||||
private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();
|
||||
|
||||
static {
|
||||
for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -23,6 +22,4 @@ public interface Drafted<T> extends MapExportable {
|
|||
Set<String> mutated();
|
||||
|
||||
T build();
|
||||
|
||||
Set<String> read();
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,6 +16,7 @@
|
|||
package net.helenus.core.reflect;
|
||||
|
||||
import com.datastax.driver.core.Metadata;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
|
||||
public interface DslExportable {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,7 +15,6 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import com.datastax.driver.core.*;
|
||||
import java.lang.reflect.InvocationHandler;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
|
@ -24,6 +22,9 @@ import java.util.Collection;
|
|||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.*;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusMappingEntity;
|
||||
|
@ -45,10 +46,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
private HelenusEntity entity = null;
|
||||
private Metadata metadata = null;
|
||||
|
||||
public DslInvocationHandler(
|
||||
Class<E> iface,
|
||||
ClassLoader classLoader,
|
||||
Optional<HelenusPropertyNode> parent,
|
||||
public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
|
||||
Metadata metadata) {
|
||||
|
||||
this.metadata = metadata;
|
||||
|
@ -77,12 +75,8 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
|
||||
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
|
||||
|
||||
Object childDsl =
|
||||
Helenus.dsl(
|
||||
javaType,
|
||||
classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)),
|
||||
metadata);
|
||||
Object childDsl = Helenus.dsl(javaType, classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
|
||||
|
||||
udtMap.put(prop.getGetterMethod(), childDsl);
|
||||
}
|
||||
|
@ -90,15 +84,10 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
if (type instanceof DTDataType) {
|
||||
DTDataType dataType = (DTDataType) type;
|
||||
|
||||
if (dataType.getDataType() instanceof TupleType
|
||||
&& !TupleValue.class.isAssignableFrom(javaType)) {
|
||||
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
|
||||
|
||||
Object childDsl =
|
||||
Helenus.dsl(
|
||||
javaType,
|
||||
classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)),
|
||||
metadata);
|
||||
Object childDsl = Helenus.dsl(javaType, classLoader,
|
||||
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
|
||||
|
||||
tupleMap.put(prop.getGetterMethod(), childDsl);
|
||||
}
|
||||
|
@ -126,9 +115,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
return false;
|
||||
}
|
||||
|
||||
if (DslExportable.SET_METADATA_METHOD.equals(methodName)
|
||||
&& args.length == 1
|
||||
&& args[0] instanceof Metadata) {
|
||||
if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
|
||||
if (metadata == null) {
|
||||
this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
|
||||
}
|
||||
|
@ -182,7 +169,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
DataType dt = dataType.getDataType();
|
||||
|
||||
switch (dt.getName()) {
|
||||
case TUPLE:
|
||||
case TUPLE :
|
||||
Object childDsl = tupleMap.get(method);
|
||||
|
||||
if (childDsl != null) {
|
||||
|
@ -191,16 +178,16 @@ public class DslInvocationHandler<E> implements InvocationHandler {
|
|||
|
||||
break;
|
||||
|
||||
case SET:
|
||||
case SET :
|
||||
return new SetDsl(new HelenusPropertyNode(prop, parent));
|
||||
|
||||
case LIST:
|
||||
case LIST :
|
||||
return new ListDsl(new HelenusPropertyNode(prop, parent));
|
||||
|
||||
case MAP:
|
||||
case MAP :
|
||||
return new MapDsl(new HelenusPropertyNode(prop, parent));
|
||||
|
||||
default:
|
||||
default :
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,73 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import net.helenus.core.Getter;
|
||||
|
||||
public interface Entity {
|
||||
String WRITTEN_AT_METHOD = "writtenAt";
|
||||
String TTL_OF_METHOD = "ttlOf";
|
||||
String TOKEN_OF_METHOD = "tokenOf";
|
||||
|
||||
/**
|
||||
* The write time for the property in question referenced by the getter.
|
||||
*
|
||||
* @param getter the property getter
|
||||
* @return the timestamp associated with the property identified by the getter
|
||||
*/
|
||||
default Long writtenAt(Getter getter) {
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* The write time for the property in question referenced by the property name.
|
||||
*
|
||||
* @param prop the name of a property in this entity
|
||||
* @return the timestamp associated with the property identified by the property name if it exists
|
||||
*/
|
||||
default Long writtenAt(String prop) {
|
||||
return 0L;
|
||||
};
|
||||
|
||||
/**
|
||||
* The time-to-live for the property in question referenced by the getter.
|
||||
*
|
||||
* @param getter the property getter
|
||||
* @return the time-to-live in seconds associated with the property identified by the getter
|
||||
*/
|
||||
default Integer ttlOf(Getter getter) {
|
||||
return 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* The time-to-live for the property in question referenced by the property name.
|
||||
*
|
||||
* @param prop the name of a property in this entity
|
||||
* @return the time-to-live in seconds associated with the property identified by the property name if it exists
|
||||
*/
|
||||
default Integer ttlOf(String prop) {
|
||||
return 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* The token (partition identifier) for this entity which can change over time if
|
||||
* the cluster grows or shrinks but should be stable otherwise.
|
||||
*
|
||||
* @return the token for the entity
|
||||
*/
|
||||
default Long tokenOf() { return 0L; }
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,7 +19,9 @@ import java.lang.annotation.Annotation;
|
|||
import java.lang.reflect.Method;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
import javax.validation.ConstraintValidator;
|
||||
|
||||
import net.helenus.core.SessionRepository;
|
||||
import net.helenus.mapping.*;
|
||||
import net.helenus.mapping.type.AbstractDataType;
|
||||
|
@ -64,11 +65,6 @@ public final class HelenusNamedProperty implements HelenusProperty {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isIdempotent() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getJavaType() {
|
||||
throw new HelenusMappingException("will never called");
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -18,6 +17,7 @@ package net.helenus.core.reflect;
|
|||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import net.helenus.mapping.HelenusEntity;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,6 +16,7 @@
|
|||
package net.helenus.core.reflect;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.support.DslPropertyException;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,6 +19,7 @@ import java.util.Collection;
|
|||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.support.DslPropertyException;
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,25 +16,10 @@
|
|||
package net.helenus.core.reflect;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import net.helenus.core.Getter;
|
||||
|
||||
public interface MapExportable {
|
||||
String TO_MAP_METHOD = "toMap";
|
||||
String TO_READ_SET_METHOD = "toReadSet";
|
||||
String PUT_METHOD = "put";
|
||||
|
||||
public static final String TO_MAP_METHOD = "toMap";
|
||||
|
||||
Map<String, Object> toMap();
|
||||
|
||||
default Map<String, Object> toMap(boolean mutable) {
|
||||
return null;
|
||||
}
|
||||
|
||||
default Set<String> toReadSet() {
|
||||
return null;
|
||||
}
|
||||
|
||||
default void put(String key, Object value) {}
|
||||
|
||||
default <T> void put(Getter<T> getter, T value) {}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,32 +15,23 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectStreamException;
|
||||
import java.io.Serializable;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationHandler;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.*;
|
||||
import net.helenus.core.Getter;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import net.helenus.core.Helenus;
|
||||
import net.helenus.core.cache.CacheUtil;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import net.helenus.mapping.annotation.Transient;
|
||||
import net.helenus.mapping.value.ValueProviderMap;
|
||||
import net.helenus.support.HelenusException;
|
||||
|
||||
public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
|
||||
private static final long serialVersionUID = -7044209982830584984L;
|
||||
|
||||
private Map<String, Object> src;
|
||||
private final Set<String> read = new HashSet<String>();
|
||||
private final Map<String, Object> src;
|
||||
private final Class<E> iface;
|
||||
|
||||
public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
|
||||
|
@ -57,30 +47,18 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
// https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/
|
||||
|
||||
// First, we need an instance of a private inner-class found in MethodHandles.
|
||||
Constructor<MethodHandles.Lookup> constructor =
|
||||
MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
|
||||
Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
|
||||
int.class);
|
||||
constructor.setAccessible(true);
|
||||
|
||||
// Now we need to lookup and invoke special the default method on the interface
|
||||
// class.
|
||||
final Class<?> declaringClass = method.getDeclaringClass();
|
||||
Object result =
|
||||
constructor
|
||||
.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
|
||||
.unreflectSpecial(method, declaringClass)
|
||||
.bindTo(proxy)
|
||||
.invokeWithArguments(args);
|
||||
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
|
||||
.unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
|
||||
return result;
|
||||
}
|
||||
|
||||
private Object writeReplace() {
|
||||
return new SerializationProxy<E>(this);
|
||||
}
|
||||
|
||||
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
|
||||
throw new InvalidObjectException("Proxy required.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
|
||||
|
||||
|
@ -101,96 +79,12 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
return true;
|
||||
}
|
||||
}
|
||||
if (otherObj instanceof MapExportable) {
|
||||
return MappingUtil.compareMaps((MapExportable) otherObj, src);
|
||||
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (MapExportable.PUT_METHOD.equals(methodName) && method.getParameterCount() == 2) {
|
||||
final String key;
|
||||
if (args[0] instanceof String) {
|
||||
key = (String) args[0];
|
||||
} else if (args[0] instanceof Getter) {
|
||||
key = MappingUtil.resolveMappingProperty((Getter) args[0]).getProperty().getPropertyName();
|
||||
} else {
|
||||
key = null;
|
||||
}
|
||||
if (key != null) {
|
||||
final Object value = (Object) args[1];
|
||||
if (src instanceof ValueProviderMap) {
|
||||
this.src = fromValueProviderMap(src);
|
||||
}
|
||||
src.put(key, value);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
if (Entity.WRITTEN_AT_METHOD.equals(methodName) && method.getParameterCount() == 1) {
|
||||
final String key;
|
||||
if (args[0] instanceof String) {
|
||||
key = CacheUtil.writeTimeKey((String) args[0]);
|
||||
} else if (args[0] instanceof Getter) {
|
||||
Getter getter = (Getter) args[0];
|
||||
key =
|
||||
CacheUtil.writeTimeKey(
|
||||
MappingUtil.resolveMappingProperty(getter)
|
||||
.getProperty()
|
||||
.getColumnName()
|
||||
.toCql(false));
|
||||
} else {
|
||||
return 0L;
|
||||
}
|
||||
Long v = (Long) src.get(key);
|
||||
if (v != null) {
|
||||
return v;
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (Entity.TOKEN_OF_METHOD.equals(methodName) && method.getParameterCount() == 0) {
|
||||
Long v = (Long) src.get("");
|
||||
if (v != null) {
|
||||
return v;
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (Entity.TTL_OF_METHOD.equals(methodName) && method.getParameterCount() == 1) {
|
||||
final String key;
|
||||
if (args[0] instanceof String) {
|
||||
key = CacheUtil.ttlKey((String) args[0]);
|
||||
} else if (args[0] instanceof Getter) {
|
||||
Getter getter = (Getter) args[0];
|
||||
key =
|
||||
CacheUtil.ttlKey(
|
||||
MappingUtil.resolveMappingProperty(getter)
|
||||
.getProperty()
|
||||
.getColumnName()
|
||||
.toCql(false));
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
int v[] = (int[]) src.get(key);
|
||||
if (v != null) {
|
||||
return v[0];
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
|
||||
if (method.getParameterCount() == 1 && args[0] instanceof Boolean) {
|
||||
if ((boolean) args[0] == true) {
|
||||
return fromValueProviderMap(src, true);
|
||||
}
|
||||
}
|
||||
return Collections.unmodifiableMap(src);
|
||||
}
|
||||
|
||||
if (MapExportable.TO_READ_SET_METHOD.equals(methodName)) {
|
||||
return read;
|
||||
}
|
||||
|
||||
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
|
||||
throw new HelenusException("invalid getter method " + method);
|
||||
}
|
||||
|
@ -203,33 +97,30 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
return iface.getSimpleName() + ": " + src.toString();
|
||||
}
|
||||
|
||||
if ("writeReplace".equals(methodName)) {
|
||||
return new SerializationProxy(this);
|
||||
}
|
||||
|
||||
if ("readObject".equals(methodName)) {
|
||||
throw new InvalidObjectException("Proxy required.");
|
||||
}
|
||||
|
||||
if ("dsl".equals(methodName)) {
|
||||
return Helenus.dsl(iface);
|
||||
}
|
||||
|
||||
final Object value = src.get(methodName);
|
||||
read.add(methodName);
|
||||
if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
|
||||
return Collections.unmodifiableMap(src);
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
Object value = src.get(methodName);
|
||||
|
||||
Class<?> returnType = method.getReturnType();
|
||||
|
||||
if (value == null) {
|
||||
|
||||
// Default implementations of non-Transient methods in entities are the default
|
||||
// value when the map contains 'null'.
|
||||
// value when the
|
||||
// map contains 'null'.
|
||||
if (method.isDefault()) {
|
||||
return invokeDefault(proxy, method, args);
|
||||
}
|
||||
|
||||
// Otherwise, if the return type of the method is a primitive Java type then
|
||||
// we'll return the standard default values to avoid a NPE in user code.
|
||||
// we'll return the standard
|
||||
// default values to avoid a NPE in user code.
|
||||
if (returnType.isPrimitive()) {
|
||||
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
|
||||
if (type == null) {
|
||||
|
@ -241,54 +132,4 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
|
|||
|
||||
return value;
|
||||
}
|
||||
|
||||
static Map<String, Object> fromValueProviderMap(Map v) {
|
||||
return fromValueProviderMap(v, false);
|
||||
}
|
||||
|
||||
static Map<String, Object> fromValueProviderMap(Map v, boolean mutable) {
|
||||
if (v instanceof ValueProviderMap) {
|
||||
Map<String, Object> m = new HashMap<String, Object>(v.size());
|
||||
Set<String> keys = v.keySet();
|
||||
for (String key : keys) {
|
||||
Object value = v.get(key);
|
||||
if (value != null && mutable) {
|
||||
if (ImmutableList.class.isAssignableFrom(value.getClass())) {
|
||||
m.put(key, new ArrayList((List) value));
|
||||
} else if (ImmutableMap.class.isAssignableFrom(value.getClass())) {
|
||||
m.put(key, new HashMap((Map) value));
|
||||
} else if (ImmutableSet.class.isAssignableFrom(value.getClass())) {
|
||||
m.put(key, new HashSet((Set) value));
|
||||
} else {
|
||||
m.put(key, value);
|
||||
}
|
||||
} else {
|
||||
m.put(key, value);
|
||||
}
|
||||
}
|
||||
return m;
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
static class SerializationProxy<E> implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -5617583940055969353L;
|
||||
|
||||
private final Class<E> iface;
|
||||
private final Map<String, Object> src;
|
||||
|
||||
public SerializationProxy(MapperInvocationHandler mapper) {
|
||||
this.iface = mapper.iface;
|
||||
if (mapper.src instanceof ValueProviderMap) {
|
||||
this.src = fromValueProviderMap(mapper.src);
|
||||
} else {
|
||||
this.src = mapper.src;
|
||||
}
|
||||
}
|
||||
|
||||
Object readResolve() throws ObjectStreamException {
|
||||
return new MapperInvocationHandler(iface, src);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,11 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import com.datastax.driver.core.Metadata;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Optional;
|
||||
|
||||
import com.datastax.driver.core.Metadata;
|
||||
|
||||
import net.helenus.core.DslInstantiator;
|
||||
|
||||
public enum ReflectionDslInstantiator implements DslInstantiator {
|
||||
|
@ -26,15 +27,10 @@ public enum ReflectionDslInstantiator implements DslInstantiator {
|
|||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public <E> E instantiate(
|
||||
Class<E> iface,
|
||||
ClassLoader classLoader,
|
||||
Optional<HelenusPropertyNode> parent,
|
||||
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
|
||||
Metadata metadata) {
|
||||
DslInvocationHandler<E> handler =
|
||||
new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
|
||||
E proxy =
|
||||
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
|
||||
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
|
||||
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
|
||||
return proxy;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -20,7 +19,8 @@ import net.helenus.support.HelenusMappingException;
|
|||
|
||||
public final class ReflectionInstantiator {
|
||||
|
||||
private ReflectionInstantiator() {}
|
||||
private ReflectionInstantiator() {
|
||||
}
|
||||
|
||||
public static <T> T instantiateClass(Class<T> clazz) {
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,9 +15,9 @@
|
|||
*/
|
||||
package net.helenus.core.reflect;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Map;
|
||||
|
||||
import net.helenus.core.MapperInstantiator;
|
||||
|
||||
public enum ReflectionMapperInstantiator implements MapperInstantiator {
|
||||
|
@ -29,10 +28,7 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
|
|||
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
|
||||
|
||||
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
|
||||
E proxy =
|
||||
(E)
|
||||
Proxy.newProxyInstance(
|
||||
classLoader, new Class[] {iface, MapExportable.class, Serializable.class}, handler);
|
||||
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
|
||||
return proxy;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -19,6 +18,7 @@ package net.helenus.core.reflect;
|
|||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
|
||||
import net.helenus.support.HelenusMappingException;
|
||||
|
||||
public final class SetDsl<V> implements Set<V> {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Casser Authors
|
||||
* Copyright (C) 2015-2018 The Helenus Authors
|
||||
* Copyright (C) 2015 The Helenus Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -17,6 +16,7 @@
|
|||
package net.helenus.mapping;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import net.helenus.mapping.annotation.ClusteringColumn;
|
||||
import net.helenus.mapping.annotation.Column;
|
||||
import net.helenus.mapping.annotation.PartitionKey;
|
||||
|
@ -103,21 +103,13 @@ public final class ColumnInformation {
|
|||
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
|
||||
|
||||
if (columnTypeLocal != ColumnType.COLUMN) {
|
||||
throw new HelenusMappingException(
|
||||
"property can be annotated only by a single column type " + getter);
|
||||
throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ColumnInformation [columnName="
|
||||
+ columnName
|
||||
+ ", columnType="
|
||||
+ columnType
|
||||
+ ", ordinal="
|
||||
+ ordinal
|
||||
+ ", ordering="
|
||||
+ ordering
|
||||
+ "]";
|
||||
return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
|
||||
+ ", ordering=" + ordering + "]";
|
||||
}
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue