Compare commits: gburd/wip-... → develop

107 commits (author and date columns were empty in the capture; SHA1s only):

9db3500e3a, d179539a31, 06fe21d08e, 099fd999ee, 662a697d03, b449817659, e6051b12e9, 9f511cde74, 4b9187ebe5, 93a81e7fd0, 3169d0c100, b27bc7d9a9, 1f4c2154e2, 6788cea1a0, 9d94e865b6, 654f4434bf, 6c245c121e, ef455ac032, b1e333009c, ca6afc326c, af4156079d, b023ec359b, 76b603f3d3, d69d8a3b1e, 6858cf6f48, 5215749de1, 2299939be3, 0ddacec354, 287e1a5b8b, 8b9d582fa5, 96a8476fd8, f168b33f6a, 11de7015c2, f9b1563bdd, e2f45f82c9, 27dd9a4eff, 26f41dab75, 1da822ce57, 1ef50ae179, 26c67e391a, 3554b7ecb5, 7b4e46431f, 60b040e7a9, 7a470bd5d7, 0827291253, 50f656bc8a, 9df97b3e44, 33b4b35912, 1eccb631f3, e932d0dcf2, 618a7ea380, 7a56059036, 33d2459538, a63a1be4b6, d30361538c, a993af6c29, 39a8643103, c025dc35a7, d19a9c741d, 6ff188f241, 2f0801d36f, b4dca9c710, 41e5d8c1e5, 5570a97dff, a198989a76, eb22e3c72e, a79e7dacf1, fe47531984, a600c0bd23, b777e9c051, 0b86d33725, 690cd1e064, ef4f9054ac, 465c7d6f2a, 864c4af5af, d1fe54b0ce, 962145bf46, e4cda1a268, 377191f12a, 792d2b6598, 2ee300e420, 13eaa7e7ea, 5905663c58, 857eadff45, e1884cf52d, 09a7fbc405, 01a458a7f6, 48545c1e84, dcea254fdb, 5605824886, c3f9b83770, 52dab5872c, a65b775faa, 62be0e6ccd, 7b14eda9b3, 92f74e41e1, b04e033bf4, c7e37acc5a, e5918cd1e8, 7535e9ade7, 8a7dbfdec1, 18f2a057de, 1642f09ce9, c6b95f12b4, a1e29c62c8, 25eb81219d, a3b9ff9af3
307 changed files with 16384 additions and 13016 deletions
@@ -3,7 +3,6 @@
 <component name="EclipseCodeFormatterProjectSettings">
   <option name="projectSpecificProfile">
     <ProjectSpecificProfile>
       <option name="formatter" value="ECLIPSE" />
       <option name="pathToConfigFileJava" value="$PROJECT_DIR$/../newton/formatting/onshape-eclipse-general-preferences.epf" />
     </ProjectSpecificProfile>
   </option>
NOTES (274 changed lines)
@@ -1,172 +1,35 @@
Operation/
|-- AbstractStatementOperation
| |-- AbstractOperation
| | |-- AbstractFilterOperation
| | | |-- CountOperation
| | | |-- DeleteOperation
| | | `-- UpdateOperation
| | |-- BoundOperation
| | `-- InsertOperation
| |-- AbstractOptionalOperation
| | |-- AbstractFilterOptionalOperation
| | | |-- SelectFirstOperation
| | | `-- SelectFirstTransformingOperation
| | `-- BoundOptionalOperation
| `-- AbstractStreamOperation
| |-- AbstractFilterStreamOperation
| | |-- SelectOperation
| | `-- SelectTransformingOperation
| `-- BoundStreamOperation
|-- PreparedOperation
|-- PreparedOptionalOperation
`-- PreparedStreamOperation

----
@CompoundIndex()
Create a new column in the same table, called __idx_a_b_c, in which the hash of the concatenated values (in that order) is stored, and create a normal index for it (CREATE INDEX ...).
If a query matches that set of columns, then use that indexed column to fetch the desired results from the table.
This could also work with an .in() query if a materialized view exists.
----
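A minimal sketch of how such a compound-index hash could be computed and queried; the helper class, its name, and the Murmur3 choice are illustrative assumptions, not part of the codebase:

// Hypothetical helper: derive the synthetic __idx_a_b_c column value from the
// component column values, in declaration order.
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public final class CompoundIndexHash {

  private CompoundIndexHash() {}

  public static long hashOf(Object... componentValues) {
    StringBuilder concatenated = new StringBuilder();
    for (Object v : componentValues) {
      // A delimiter avoids ("ab", "c") colliding with ("a", "bc").
      concatenated.append(v).append('\u0000');
    }
    return Hashing.murmur3_128()
        .hashString(concatenated, StandardCharsets.UTF_8)
        .asLong();
  }
}

// The table would then carry the extra column and a plain secondary index:
//   ALTER TABLE example ADD __idx_a_b_c bigint;
//   CREATE INDEX example_idx_a_b_c ON example (__idx_a_b_c);
// and a query touching exactly (a, b, c) becomes:
//   SELECT * FROM example WHERE __idx_a_b_c = ?;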
--- Cache

// `E` is the type of the Entity class or one of:
// - ResultSet
// - ArrayTuple{N}
// - Count
// `F` is the type argument passed to us from HelenusSession DSL and carried on via one of the
// Operation classes; it is going to be one of:
// - ResultSet
// - ArrayTuple{N}
// - or a type previously registered as a HelenusEntity,
// in the form of a:
// - Stream<?> or an
// - Optional<?>
//
// Operation/
// |-- AbstractStatementOperation
// | |-- AbstractOperation
// | | |-- AbstractFilterOperation
// | | | |-- CountOperation
// | | | |-- DeleteOperation
// | | | `-- UpdateOperation
// | | |-- BoundOperation
// | | `-- InsertOperation
// | |-- AbstractOptionalOperation
// | | |-- AbstractFilterOptionalOperation
// | | | |-- SelectFirstOperation
// | | | `-- SelectFirstTransformingOperation
// | | `-- BoundOptionalOperation
// | `-- AbstractStreamOperation
// | |-- AbstractFilterStreamOperation
// | | |-- SelectOperation
// | | `-- SelectTransformingOperation
// | `-- BoundStreamOperation
// |-- PreparedOperation
// |-- PreparedOptionalOperation
// `-- PreparedStreamOperation
//
// These all boil down to: Select, Update, Insert, Delete and Count.
//
// -- Select:
// 1) Select statements that contain all primary key information will be "distinct" and
//    result in a single value or no match.
//    If present, return the cached entity; otherwise execute the query and cache the result.
//
// 2) Otherwise the result is a set, possibly empty, of values that match.
//    When within a UOW:
//    If present, return the cached value(s) from the statement cache matching the query string.
//    Otherwise, execute the query, cache the result in the statement cache, and update/merge the
//    entities into the entity cache.
//    NOTE: When we read data from the database we augment the select clause with TTL and write-time
//    stamps for all columns that record such information so as to be able to properly expire
//    and merge values in the cache.
//
// -- Update:
// Execute the database statement and then, iff it succeeds, upsert the entity being updated into
// the entity cache.
//
// -- Insert/Upsert:
// Same as Update.
//
// -- Delete:
// Same as Update, only remove the cached value from all caches on success.
//
// -- Count:
// If operating within a UOW, look up the count in the statement cache; if not present, execute
// the query and cache the result.
//

if (delegate instanceof SelectOperation) {
  SelectOperation<E> op = (SelectOperation<E>) delegate;

  // Determine if we are caching and if so where.
  AbstractCache<CacheKey, Set<E>> cache = delegate.getCache();
  boolean prepareStatementForCaching = cache != null;
  if (uow != null) {
    prepareStatementForCaching = true;
    cache = uow.<Set<E>>getCacheEnclosing(cache);
  }

  // The delegate will provide the cache key because it will either be:
  // a) when distinct: the combination of the partition/cluster key columns
  // b) otherwise: the table name followed by the portion of the SQL statement that would form the WHERE clause
  CacheKey key = (cache == null) ? null : delegate.getCacheKey();
  if (key != null && cache != null) {
    Set<E> value = cache.get(key);
    if (value != null) {
      // Select will always return a Stream<E>
      // TODO(gburd): SelectTransforming... apply fn here?
      result = (E) value.stream();
      if (cacheHitCounter != null) {
        cacheHitCounter.inc();
      }
      if (log != null) {
        log.info("cache hit");
      }
      return result;
    } else {
      if (cacheMissCounter != null) {
        cacheMissCounter.inc();
      }
      if (log != null) {
        log.info("cache miss");
      }
    }
  }
}

if (cache != null) {
  Object obj = delegate.unwrap(result);
  if (obj != null) {
    cache.put(key, obj);
  }

  delegate.<E>extract(result, key, cache);
}

// TODO: first, ask the delegate for the cacheKey
// if this is a SELECT query:
//   if not in cache, build the statement, execute the future, cache the result,
//   transform the result, then cache the transformations
// if INSERT/UPSERT/UPDATE
// if DELETE
// if COUNT

----------------------------

@Override
public CacheKey getCacheKey() {

  List<String> keys = new ArrayList<>(filters.size());
  HelenusEntity entity = props.get(0).getEntity();

  for (HelenusPropertyNode prop : props) {
    switch (prop.getProperty().getColumnType()) {
      case PARTITION_KEY:
      case CLUSTERING_COLUMN:
        Filter filter = filters.get(prop.getProperty());
        if (filter != null) {
          keys.add(filter.toString());
        } else {
          // We're missing a part of the primary key, so we can't create a proper cache key.
          return null;
        }
        break;
      default:
        // We've passed the primary key components in this ordered list, so we're done building
        // the cache key.
        if (keys.size() > 0) {
          return new CacheKey(entity, Joiner.on(",").join(keys));
        }
        return null;
    }
  }
  return null;
}

---------------------------

// TODO(gburd): create a statement that matches one that wasn't prepared
//String key =
//    "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString();
@@ -175,64 +38,6 @@
//}

------------------------
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public abstract class AbstractCache<K, V> {
  final Logger logger = LoggerFactory.getLogger(getClass());
  public Cache<K, V> cache;

  public AbstractCache() {
    RemovalListener<K, V> listener =
        new RemovalListener<K, V>() {
          @Override
          public void onRemoval(RemovalNotification<K, V> n) {
            if (n.wasEvicted()) {
              String cause = n.getCause().name();
              logger.info(cause);
            }
          }
        };

    cache = CacheBuilder.newBuilder()
        .maximumSize(10_000)
        .expireAfterAccess(20, TimeUnit.MINUTES)
        .weakKeys()
        .softValues()
        .removalListener(listener)
        .build();
  }

  V get(K key) {
    return cache.getIfPresent(key);
  }

  void put(K key, V value) {
    cache.put(key, value);
  }
}

------------------------------------------------------------------------------------------------
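A usage sketch for the AbstractCache scratch class above; the subclass name is made up for illustration:

// Hypothetical subclass and usage; names are illustrative only. This must live in
// net.helenus.core.operation, since get/put above are package-private.
class ExampleCache extends AbstractCache<String, String> {
  public static void main(String[] args) {
    ExampleCache cache = new ExampleCache();
    String key = "user:42"; // hold this reference: weakKeys() compares keys by identity (==)
    cache.put(key, "token-abc");
    String hit = cache.get(key); // "token-abc" while the entry is live
    System.out.println(hit);
  }
}

One design note: Guava's weakKeys() switches key comparison from equals() to identity, so a lookup with an equal-but-distinct key instance will miss; that may or may not be intended here.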
cache entities (two methods) marked @Cacheable
cache entities in txn context
cache results when .cache() is chained before the .{a}sync() call; return an EvictableCacheItem<E> that has an .evict() method
fix txn .andThen() chains
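The EvictableCacheItem mentioned above does not exist yet; a minimal sketch of what that interface might look like, purely an assumption drawn from the note:

// Hypothetical interface sketched from the note above; not in the codebase.
public interface EvictableCacheItem<E> {
  E get();      // the cached result
  void evict(); // drop this entry from whichever cache(s) hold it
}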
Primitive types have default values (e.g. boolean, int, ...) but primitive wrapper classes do not and can be null (e.g. Boolean, Integer, ...).
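A quick illustration in Java:

public class Defaults {
  static boolean flag;  // defaults to false
  static int count;     // defaults to 0
  static Boolean maybe; // defaults to null
  static Integer total; // defaults to null

  public static void main(String[] args) {
    System.out.println(flag + " " + count + " " + maybe + " " + total); // false 0 null null
  }
}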
@@ -372,3 +177,26 @@ begin:
cache.put
}
*/
------------------

InsertOperation

Class<?> iface = entity.getMappingInterface();
boolean includesNonIdentityValues =
    values.stream()
        .map(t -> {
          ColumnType type = t._1.getProperty().getColumnType();
          return !((type == ColumnType.PARTITION_KEY) || (type == ColumnType.CLUSTERING_COLUMN));
        })
        .reduce(false, (acc, t) -> acc || t);
if (resultType == iface) {
  if (values.size() > 0 && includesNonIdentityValues) {
    boolean immutable = iface.isAssignableFrom(Drafted.class);

-------------------

final Object value;
if (method.getParameterCount() == 1 && args[0] instanceof Boolean && src instanceof ValueProviderMap) {
  value = ((ValueProviderMap) src).get(methodName, (Boolean) args[0]);
} else {
  value = src.get(methodName);
}

--------------------
@@ -1,3 +1,3 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 mvn clean jar:jar javadoc:jar source:jar deploy -Prelease
@@ -1,7 +1,14 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
-for f in $(find ./src -name \*.java); do
+if [ "X$1" == "Xall" ]; then
+  for f in $(find ./src -name \*.java); do
     echo Formatting $f
     java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $f
-done
+  done
+else
+  for file in $(git status --short | awk '{print $2}'); do
+    echo $file
+    java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $file
+  done
+fi
@@ -1,3 +1,3 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 mvn clean jar:jar javadoc:jar source:jar install -Prelease
build.gradle (90 changed lines, file deleted)
@@ -1,90 +0,0 @@
// gradle wrapper
// ./gradlew clean generateLock saveLock
// ./gradlew compileJava
// ./gradlew run
// ./gradlew run --debug-jvm
// ./gradlew publishToMavenLocal

buildscript {
  ext {}
  repositories {
    jcenter()
    mavenLocal()
    mavenCentral()
    maven { url "https://clojars.org/repo" }
    maven { url "https://plugins.gradle.org/m2/" }
  }
  dependencies {
    classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:4.+'
    classpath 'com.uber:okbuck:0.19.0'
  }
}

apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'eclipse'
apply plugin: 'java-library'
apply plugin: 'maven-publish'
apply plugin: 'com.uber.okbuck'
apply plugin: 'nebula.dependency-lock'

task wrapper(type: Wrapper) {
  gradleVersion = '4.0.2'
}

jar {
  baseName = 'helenus'
  group = 'net.helenus'
  version = '2.0.17-SNAPSHOT'
}

description = """helenus"""

sourceCompatibility = 1.8
targetCompatibility = 1.8
tasks.withType(JavaCompile) {
  options.encoding = 'UTF-8'
}

configurations.all {
}

repositories {
  jcenter()
  mavenLocal()
  mavenCentral()
  maven { url "file:///Users/gburd/ws/helenus/lib" }
  maven { url "https://oss.sonatype.org/content/repositories/snapshots" }
  maven { url "http://repo.maven.apache.org/maven2" }
}
dependencies {
  compile group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.3.0'
  compile group: 'org.aspectj', name: 'aspectjrt', version: '1.8.10'
  compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
  compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
  compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'
  compile group: 'com.google.guava', name: 'guava', version: '20.0'
  compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
  compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'
  compile group: 'io.zipkin.brave', name: 'brave', version: '4.0.6'
  compile group: 'io.dropwizard.metrics', name: 'metrics-core', version: '3.2.2'
  compile group: 'javax.validation', name: 'validation-api', version: '2.0.0.CR3'
  compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.1'

  runtime group: 'org.slf4j', name: 'jcl-over-slf4j', version: '1.7.1'

  testCompile group: 'org.codehaus.jackson', name: 'jackson-mapper-asl', version: '1.9.13'
  testCompile group: 'com.anthemengineering.mojo', name: 'infer-maven-plugin', version: '0.1.0'
  testCompile group: 'org.codehaus.jackson', name: 'jackson-core-asl', version: '1.9.13'
  testCompile(group: 'org.cassandraunit', name: 'cassandra-unit', version: '3.1.4.0-SNAPSHOT') {
    exclude(module: 'cassandra-driver-core')
  }
  testCompile group: 'org.apache.cassandra', name: 'cassandra-all', version: '3.11.0'
  testCompile group: 'commons-io', name: 'commons-io', version: '2.5'
  testCompile group: 'junit', name: 'junit', version: '4.12'
  testCompile group: 'com.github.stephenc', name: 'jamm', version: '0.2.5'
  testCompile group: 'org.hamcrest', name: 'hamcrest-library', version: '1.3'
  testCompile group: 'org.hamcrest', name: 'hamcrest-core', version: '1.3'
  testCompile group: 'org.mockito', name: 'mockito-core', version: '2.8.47'
}
@@ -1,648 +0,0 @@
(648 deleted lines of Nebula dependency-lock JSON — presumably dependencies.lock, given the nebula.dependency-lock plugin in build.gradle above. Each configuration block repeated the same locked/requested pins; condensed here:)

Dependency | Requested | Locked | Configurations
---|---|---|---
com.datastax.cassandra:cassandra-driver-core | 3.3.0 | 3.3.0 | all
com.diffplug.durian:durian | 3.+ | 3.5.0-SNAPSHOT | all
com.google.guava:guava | 20.0 | 20.0 (21.0 in test configurations) | all
io.dropwizard.metrics:metrics-core | 3.2.2 | 3.2.2 | all
io.zipkin.brave:brave | 4.0.6 | 4.0.6 | all
io.zipkin.java:zipkin | 1.29.2 | 1.29.2 | all
javax.validation:validation-api | 2.0.0.CR3 | 2.0.0.CR3 | all
org.apache.commons:commons-lang3 | 3.6 | 3.6 | all
org.aspectj:aspectjrt | 1.8.10 | 1.8.10 | all
org.aspectj:aspectjweaver | 1.8.10 | 1.8.10 | all
org.slf4j:slf4j-api | 1.7.1 | 1.7.25 | all
org.slf4j:jcl-over-slf4j | 1.7.1 | 1.7.1 (1.7.7 in testRuntime) | default, runtime, testRuntime
org.springframework:spring-core | 4.3.10.RELEASE | 4.3.10.RELEASE | all
com.anthemengineering.mojo:infer-maven-plugin | 0.1.0 | 0.1.0 | test only
com.github.stephenc:jamm | 0.2.5 | 0.2.5 | test only
commons-io:commons-io | 2.5 | 2.5 | test only
junit:junit | 4.12 | 4.12 | test only
org.apache.cassandra:cassandra-all | 3.11.0 | 3.11.0 | test only
org.cassandraunit:cassandra-unit | 3.1.4.0-SNAPSHOT | 3.1.4.0-SNAPSHOT | test only
org.codehaus.jackson:jackson-core-asl | 1.9.13 | 1.9.13 | test only
org.codehaus.jackson:jackson-mapper-asl | 1.9.13 | 1.9.13 | test only
org.hamcrest:hamcrest-core | 1.3 | 1.3 | test only
org.hamcrest:hamcrest-library | 1.3 | 1.3 | test only
org.mockito:mockito-core | 2.8.47 | 2.8.47 | test only
@@ -11,7 +11,7 @@
 </content>
 <orderEntry type="inheritedJdk" />
 <orderEntry type="sourceFolder" forTests="false" />
-<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.0" level="project" />
+<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.2" level="project" />
 <orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.47.Final" level="project" />
 <orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.47.Final" level="project" />
 <orderEntry type="library" name="Maven: io.netty:netty-common:4.0.47.Final" level="project" />

@@ -28,16 +28,14 @@
 <orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
 <orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
 <orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
 <orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-extras:3.3.2" level="project" />
 <orderEntry type="library" name="Maven: com.diffplug.durian:durian:3.4.0" level="project" />
 <orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
 <orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.8.10" level="project" />
 <orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.6" level="project" />
 <orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.10.RELEASE" level="project" />
 <orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
 <orderEntry type="library" name="Maven: javax.cache:cache-api:1.1.0" level="project" />
 <orderEntry type="library" name="Maven: com.google.guava:guava:20.0" level="project" />
 <orderEntry type="library" name="Maven: io.zipkin.java:zipkin:1.29.2" level="project" />
 <orderEntry type="library" name="Maven: io.zipkin.brave:brave:4.0.6" level="project" />
 <orderEntry type="library" name="Maven: io.zipkin.reporter:zipkin-reporter:0.6.12" level="project" />
 <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
 <orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.0.CR3" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />

@@ -116,9 +114,9 @@
 <orderEntry type="library" scope="TEST" name="Maven: org.caffinitas.ohc:ohc-core:0.4.4" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:caffeine:2.2.6" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: org.jctools:jctools-core:1.2.1" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: ca.exprofesso:guava-jcache:1.0.4" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: commons-io:commons-io:2.5" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: com.github.stephenc:jamm:0.2.5" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
 <orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.8.47" level="project" />
pom.xml (60 changed lines)
@@ -109,7 +109,13 @@
 <dependency>
   <groupId>com.datastax.cassandra</groupId>
   <artifactId>cassandra-driver-core</artifactId>
-  <version>3.3.0</version>
+  <version>3.3.2</version>
 </dependency>
 
+<dependency>
+  <groupId>com.datastax.cassandra</groupId>
+  <artifactId>cassandra-driver-extras</artifactId>
+  <version>3.3.2</version>
+</dependency>
+
 <dependency>

@@ -118,12 +124,6 @@
   <version>3.4.0</version>
 </dependency>
 
-<dependency>
-  <groupId>org.aspectj</groupId>
-  <artifactId>aspectjrt</artifactId>
-  <version>1.8.10</version>
-</dependency>
-
 <dependency>
   <groupId>org.aspectj</groupId>
   <artifactId>aspectjweaver</artifactId>

@@ -142,25 +142,19 @@
   <version>4.3.10.RELEASE</version>
 </dependency>
 
+<dependency>
+  <groupId>javax.cache</groupId>
+  <artifactId>cache-api</artifactId>
+  <version>1.1.0</version>
+</dependency>
+
 <dependency>
   <groupId>com.google.guava</groupId>
   <artifactId>guava</artifactId>
   <version>20.0</version>
 </dependency>
 
-<!-- Metrics and tracing -->
-<dependency>
-  <groupId>io.zipkin.java</groupId>
-  <artifactId>zipkin</artifactId>
-  <version>1.29.2</version>
-</dependency>
-
-<dependency>
-  <groupId>io.zipkin.brave</groupId>
-  <artifactId>brave</artifactId>
-  <version>4.0.6</version>
-</dependency>
-
+<!-- Metrics -->
 <dependency>
   <groupId>io.dropwizard.metrics</groupId>
   <artifactId>metrics-core</artifactId>

@@ -217,6 +211,24 @@
   <scope>test</scope>
 </dependency>
 
+<dependency>
+  <groupId>ca.exprofesso</groupId>
+  <artifactId>guava-jcache</artifactId>
+  <version>1.0.4</version>
+  <exclusions>
+    <exclusion>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </exclusion>
+    <exclusion>
+      <groupId>javax.cache</groupId>
+      <artifactId>cache-api</artifactId>
+    </exclusion>
+  </exclusions>
+  <scope>test</scope>
+</dependency>
+
 <dependency>
   <groupId>commons-io</groupId>
   <artifactId>commons-io</artifactId>

@@ -231,13 +243,6 @@
   <scope>test</scope>
 </dependency>
 
-<dependency>
-  <groupId>com.github.stephenc</groupId>
-  <artifactId>jamm</artifactId>
-  <version>0.2.5</version>
-  <scope>test</scope>
-</dependency>
-
 <dependency>
   <groupId>org.hamcrest</groupId>
   <artifactId>hamcrest-library</artifactId>

@@ -272,7 +277,6 @@
   <version>1.7.1</version>
   <scope>runtime</scope>
 </dependency>
 
 </dependencies>
 
 <build>
@@ -1 +0,0 @@
-rootProject.name = 'helenus-core'
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,9 +16,8 @@
  */
 package com.datastax.driver.core.querybuilder;
 
-import java.util.List;
-
 import com.datastax.driver.core.CodecRegistry;
+import java.util.List;
 
 public class IsNotNullClause extends Clause {
 
@@ -16,8 +16,10 @@ public class CreateCustomIndex extends CreateIndex {
 CreateCustomIndex(String indexName) {
   super(indexName);
   validateNotEmpty(indexName, "Index name");
-  validateNotKeyWord(indexName,
-      String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
+  validateNotKeyWord(
+      indexName,
+      String.format(
+          "The index name '%s' is not allowed because it is a reserved keyword", indexName));
   this.indexName = indexName;
 }

@@ -34,20 +36,22 @@ public class CreateCustomIndex extends CreateIndex {
 /**
  * Specify the keyspace and table to create the index on.
  *
- * @param keyspaceName
- *          the keyspace name.
- * @param tableName
- *          the table name.
- * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
- *         of the column.
+ * @param keyspaceName the keyspace name.
+ * @param tableName the table name.
+ * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
  */
 public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
   validateNotEmpty(keyspaceName, "Keyspace name");
   validateNotEmpty(tableName, "Table name");
-  validateNotKeyWord(keyspaceName,
-      String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
-  validateNotKeyWord(tableName,
-      String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+  validateNotKeyWord(
+      keyspaceName,
+      String.format(
+          "The keyspace name '%s' is not allowed because it is a reserved keyword",
+          keyspaceName));
+  validateNotKeyWord(
+      tableName,
+      String.format(
+          "The table name '%s' is not allowed because it is a reserved keyword", tableName));
   this.keyspaceName = Optional.fromNullable(keyspaceName);
   this.tableName = tableName;
   return new CreateCustomIndex.CreateIndexOn();

@@ -56,52 +60,19 @@ public class CreateCustomIndex extends CreateIndex {
 /**
  * Specify the table to create the index on.
  *
- * @param tableName
- *          the table name.
- * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification
- *         of the column.
+ * @param tableName the table name.
+ * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
  */
 public CreateIndex.CreateIndexOn onTable(String tableName) {
   validateNotEmpty(tableName, "Table name");
-  validateNotKeyWord(tableName,
-      String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+  validateNotKeyWord(
+      tableName,
+      String.format(
+          "The table name '%s' is not allowed because it is a reserved keyword", tableName));
   this.tableName = tableName;
   return new CreateCustomIndex.CreateIndexOn();
 }
 
-public class CreateIndexOn extends CreateIndex.CreateIndexOn {
-  /**
-   * Specify the column to create the index on.
-   *
-   * @param columnName
-   *          the column name.
-   * @return the final CREATE INDEX statement.
-   */
-  public SchemaStatement andColumn(String columnName) {
-    validateNotEmpty(columnName, "Column name");
-    validateNotKeyWord(columnName,
-        String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
-    CreateCustomIndex.this.columnName = columnName;
-    return SchemaStatement.fromQueryString(buildInternal());
-  }
-
-  /**
-   * Create an index on the keys of the given map column.
-   *
-   * @param columnName
-   *          the column name.
-   * @return the final CREATE INDEX statement.
-   */
-  public SchemaStatement andKeysOfColumn(String columnName) {
-    validateNotEmpty(columnName, "Column name");
-    validateNotKeyWord(columnName,
-        String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
-    CreateCustomIndex.this.columnName = columnName;
-    CreateCustomIndex.this.keys = true;
-    return SchemaStatement.fromQueryString(buildInternal());
-  }
-}
-
 String getCustomClassName() {
   return "";
 }

@@ -112,7 +83,8 @@ public class CreateCustomIndex extends CreateIndex {
 
 @Override
 public String buildInternal() {
-  StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
+  StringBuilder createStatement =
+      new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
 
   if (ifNotExists) {
     createStatement.append("IF NOT EXISTS ");

@@ -145,4 +117,39 @@ public class CreateCustomIndex extends CreateIndex {
 
   return createStatement.toString();
 }
 
+public class CreateIndexOn extends CreateIndex.CreateIndexOn {
+  /**
+   * Specify the column to create the index on.
+   *
+   * @param columnName the column name.
+   * @return the final CREATE INDEX statement.
+   */
+  public SchemaStatement andColumn(String columnName) {
+    validateNotEmpty(columnName, "Column name");
+    validateNotKeyWord(
+        columnName,
+        String.format(
+            "The column name '%s' is not allowed because it is a reserved keyword", columnName));
+    CreateCustomIndex.this.columnName = columnName;
+    return SchemaStatement.fromQueryString(buildInternal());
+  }
+
+  /**
+   * Create an index on the keys of the given map column.
+   *
+   * @param columnName the column name.
+   * @return the final CREATE INDEX statement.
+   */
+  public SchemaStatement andKeysOfColumn(String columnName) {
+    validateNotEmpty(columnName, "Column name");
+    validateNotKeyWord(
+        columnName,
+        String.format(
+            "The column name '%s' is not allowed because it is a reserved keyword", columnName));
+    CreateCustomIndex.this.columnName = columnName;
+    CreateCustomIndex.this.keys = true;
+    return SchemaStatement.fromQueryString(buildInternal());
+  }
+}
 }
@@ -5,12 +5,16 @@ import com.datastax.driver.core.querybuilder.Select;
 
 public class CreateMaterializedView extends Create {
 
-  private String viewName;
+  private final String viewName;
   private Select.Where selection;
   private String primaryKey;
   private String clustering;
 
-  public CreateMaterializedView(String keyspaceName, String viewName, Select.Where selection, String primaryKey,
+  public CreateMaterializedView(
+      String keyspaceName,
+      String viewName,
+      Select.Where selection,
+      String primaryKey,
       String clustering) {
     super(keyspaceName, viewName);
     this.viewName = viewName;

@@ -24,7 +28,8 @@ public class CreateMaterializedView extends Create {
 }
 
 public String buildInternal() {
-  StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
+  StringBuilder createStatement =
+      new StringBuilder(STATEMENT_START).append("CREATE MATERIALIZED VIEW");
   if (ifNotExists) {
     createStatement.append(" IF NOT EXISTS");
   }
@@ -11,7 +11,8 @@ public class CreateSasiIndex extends CreateCustomIndex {
 }
 
 String getOptions() {
-  return "'analyzer_class': " + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
+  return "'analyzer_class': "
+      + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
       + "'case_sensitive': 'false'";
 }
}
@@ -4,14 +4,9 @@ import com.google.common.base.Optional;
 
 public class DropMaterializedView extends Drop {
 
-  enum DroppedItem {
-    TABLE, TYPE, INDEX, MATERIALIZED_VIEW
-  }
-
   private Optional<String> keyspaceName = Optional.absent();
   private String itemName;
   private boolean ifExists = true;
-  private final String itemType = "MATERIALIZED VIEW";
 
   public DropMaterializedView(String keyspaceName, String viewName) {
     this(keyspaceName, viewName, DroppedItem.MATERIALIZED_VIEW);

@@ -36,7 +31,7 @@ public class DropMaterializedView extends Drop {
 
 @Override
 public String buildInternal() {
-  StringBuilder dropStatement = new StringBuilder("DROP " + itemType + " ");
+  StringBuilder dropStatement = new StringBuilder("DROP MATERIALIZED VIEW ");
   if (ifExists) {
     dropStatement.append("IF EXISTS ");
   }

@@ -47,4 +42,11 @@ public class DropMaterializedView extends Drop {
   dropStatement.append(itemName);
   return dropStatement.toString();
 }
 
+enum DroppedItem {
+  TABLE,
+  TYPE,
+  INDEX,
+  MATERIALIZED_VIEW
+}
}
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -17,7 +18,6 @@ package net.helenus.config;
 
 import java.lang.reflect.Method;
 import java.util.function.Function;
-
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;
 import net.helenus.core.reflect.ReflectionDslInstantiator;
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -18,7 +19,6 @@ package net.helenus.config;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.function.Function;
-
 import net.helenus.mapping.annotation.Transient;
 
 public enum GetterMethodDetector implements Function<Method, Boolean> {
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -17,7 +18,6 @@ package net.helenus.config;
 
 import java.lang.reflect.Method;
 import java.util.function.Function;
-
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;
 
@@ -3,7 +3,6 @@ package net.helenus.core;
 import java.time.LocalDateTime;
 import java.time.ZoneId;
 import java.util.Date;
-
 import net.helenus.core.reflect.MapExportable;
 
 public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<E> {

@@ -34,6 +33,6 @@ public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<
 }
 
 public Date createdAt() {
-  return (Date) get("createdAt", Date.class);
+  return get("createdAt", Date.class);
 }
}
|
|||
package net.helenus.core;
|
||||
|
||||
import com.google.common.primitives.Primitives;
|
||||
import java.io.Serializable;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
import com.google.common.primitives.Primitives;
|
||||
|
||||
import net.helenus.core.reflect.DefaultPrimitiveTypes;
|
||||
import net.helenus.core.reflect.Drafted;
|
||||
import net.helenus.core.reflect.MapExportable;
|
||||
import net.helenus.mapping.HelenusProperty;
|
||||
import net.helenus.mapping.MappingUtil;
|
||||
import org.apache.commons.lang3.SerializationUtils;
|
||||
|
||||
public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
||||
|
||||
private final Map<String, Object> backingMap = new HashMap<String, Object>();
|
||||
private final MapExportable entity;
|
||||
private final Map<String, Object> entityMap;
|
||||
private final Map<String, Object> valuesMap;
|
||||
private final Set<String> readSet;
|
||||
private final Map<String, Object> mutationsMap = new HashMap<String, Object>();
|
||||
|
||||
public AbstractEntityDraft(MapExportable entity) {
|
||||
this.entity = entity;
|
||||
this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>();
|
||||
// Entities can mutate their map.
|
||||
if (entity != null) {
|
||||
this.valuesMap = entity.toMap(true);
|
||||
this.readSet = entity.toReadSet();
|
||||
} else {
|
||||
this.valuesMap = new HashMap<String, Object>();
|
||||
this.readSet = new HashSet<String>();
|
||||
}
|
||||
}
|
||||
|
||||
public abstract Class<E> getEntityClass();
|
||||
|
@ -30,16 +41,17 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
|
|||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <T> T get(Getter<T> getter, Class<?> returnType) {
|
||||
public <T> T get(Getter<T> getter, Class<?> returnType) {
|
||||
return (T) get(this.<T>methodNameFor(getter), returnType);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <T> T get(String key, Class<?> returnType) {
|
||||
T value = (T) backingMap.get(key);
|
||||
public <T> T get(String key, Class<?> returnType) {
|
||||
readSet.add(key);
|
||||
T value = (T) mutationsMap.get(key);
|
||||
|
||||
if (value == null) {
|
||||
value = (T) entityMap.get(key);
|
||||
value = (T) valuesMap.get(key);
|
||||
if (value == null) {
|
||||
|
||||
if (Primitives.allPrimitiveTypes().contains(returnType)) {
|
||||
|
@@ -51,52 +63,65 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {

         return (T) type.getDefaultValue();
       }
     } else {
+      // Collections fetched from the valuesMap
+      if (value instanceof Collection) {
+        value = (T) SerializationUtils.<Serializable>clone((Serializable) value);
+      }
     }
   }

   return value;
 }

-  protected <T> Object set(Getter<T> getter, Object value) {
-    return set(this.<T>methodNameFor(getter), value);
-  }
+  public <T> Object set(Getter<T> getter, Object value) {
+    HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
+    String key = prop.getPropertyName();
+
+    HelenusValidator.INSTANCE.validate(prop, value);

-  protected Object set(String key, Object value) {
     if (key == null || value == null) {
       return null;
     }

-    backingMap.put(key, value);
+    mutationsMap.put(key, value);
     return value;
   }

+  public Object set(String key, Object value) {
+    if (key == null || value == null) {
+      return null;
+    }
+
+    mutationsMap.put(key, value);
+    return value;
+  }
+
+  public void put(String key, Object value) {
+    mutationsMap.put(key, value);
+  }
+
   @SuppressWarnings("unchecked")
-  protected <T> T mutate(Getter<T> getter, T value) {
+  public <T> T mutate(Getter<T> getter, T value) {
     return (T) mutate(this.<T>methodNameFor(getter), value);
   }

-  protected Object mutate(String key, Object value) {
+  public <T> T mutate(String key, T value) {
+    Objects.requireNonNull(key);
+
+    if (value == null) {
+      return null;
+    }
+
-    if (value != null) {
       if (entity != null) {
-        Map<String, Object> map = entity.toMap();
-
-        if (map.containsKey(key) && !value.equals(map.get(key))) {
-          backingMap.put(key, value);
+        T currentValue = this.<T>fetch(key);
+        if (!value.equals(currentValue)) {
+          mutationsMap.put(key, value);
+          return value;
         }
-
-        return map.get(key);
       } else {
-        backingMap.put(key, value);
-
-        return null;
+        mutationsMap.put(key, value);
       }
     }
+    return null;
   }

   private <T> String methodNameFor(Getter<T> getter) {
     return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();

@@ -108,8 +133,8 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {

   public Object unset(String key) {
     if (key != null) {
-      Object value = backingMap.get(key);
-      backingMap.put(key, null);
+      Object value = mutationsMap.get(key);
+      mutationsMap.put(key, null);
       return value;
     }
     return null;

@@ -119,10 +144,18 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
     return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
   }

+  private <T> T fetch(String key) {
+    T value = (T) mutationsMap.get(key);
+    if (value == null) {
+      value = (T) valuesMap.get(key);
+    }
+    return value;
+  }
+
   public <T> boolean reset(String key, T desiredValue) {
     if (key != null && desiredValue != null) {
       @SuppressWarnings("unchecked")
-      T currentValue = (T) backingMap.get(key);
+      T currentValue = (T) this.<T>fetch(key);
       if (currentValue == null || !currentValue.equals(desiredValue)) {
         set(key, desiredValue);
         return true;

@@ -133,32 +166,37 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {

   @Override
   public Map<String, Object> toMap() {
-    return toMap(entityMap);
+    return toMap(valuesMap);
   }

   public Map<String, Object> toMap(Map<String, Object> entityMap) {
     Map<String, Object> combined;
     if (entityMap != null && entityMap.size() > 0) {
       combined = new HashMap<String, Object>(entityMap.size());
-      for (String key : entityMap.keySet()) {
-        combined.put(key, entityMap.get(key));
+      for (Map.Entry<String, Object> e : entityMap.entrySet()) {
+        combined.put(e.getKey(), e.getValue());
       }
     } else {
-      combined = new HashMap<String, Object>(backingMap.size());
+      combined = new HashMap<String, Object>(mutationsMap.size());
     }
     for (String key : mutated()) {
-      combined.put(key, backingMap.get(key));
+      combined.put(key, mutationsMap.get(key));
     }
     return combined;
   }

   @Override
   public Set<String> mutated() {
-    return backingMap.keySet();
+    return mutationsMap.keySet();
   }

+  @Override
+  public Set<String> read() {
+    return readSet;
+  }
+
   @Override
   public String toString() {
-    return backingMap.toString();
+    return mutationsMap.toString();
   }
 }

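Note: the net effect of this file's changes is that the single backingMap is split into mutationsMap (pending writes), valuesMap (a snapshot of the entity's values), and readSet (every key observed through get), which is what lets insert operations carry a read-set for conflict detection. A minimal usage sketch, assuming a hypothetical PersonDraft extends AbstractEntityDraft<Person>; only methods visible in the diff above are used:

    // Hypothetical draft type, for illustration only.
    PersonDraft draft = new PersonDraft(person);

    draft.set("age", 30);        // records the write in mutationsMap
    draft.mutate("age", 30);     // returns null: equal to the fetched current value
    draft.reset("age", 31);      // true: desired value differs, so it is set
    draft.unset("age");          // clears the pending mutation

    Set<String> written = draft.mutated();  // keys with pending writes
    Set<String> observed = draft.read();    // keys read via get(), for conflict checks
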
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,24 +16,21 @@
  */
 package net.helenus.core;

+import com.codahale.metrics.MetricRegistry;
+import com.datastax.driver.core.*;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.Table;
+import com.google.common.util.concurrent.ListenableFuture;
+import java.io.PrintStream;
+import java.util.List;
+import java.util.concurrent.Executor;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.codahale.metrics.MetricRegistry;
-import com.datastax.driver.core.*;
-import com.datastax.driver.core.querybuilder.BuiltStatement;
-import com.google.common.collect.Table;
-import com.google.common.util.concurrent.ListenableFuture;
-
 import brave.Tracer;
 import net.helenus.core.cache.Facet;
 import net.helenus.mapping.value.ColumnValuePreparer;
 import net.helenus.mapping.value.ColumnValueProvider;
+import net.helenus.support.Either;
 import net.helenus.support.HelenusException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public abstract class AbstractSessionOperations {

@@ -44,6 +42,8 @@ public abstract class AbstractSessionOperations {

   public abstract boolean isShowCql();

+  public abstract boolean showValues();
+
   public abstract PrintStream getPrintStream();

   public abstract Executor getExecutor();

@@ -60,7 +60,6 @@ public abstract class AbstractSessionOperations {

   public PreparedStatement prepare(RegularStatement statement) {
     try {
-      log(statement, false);
       return currentSession().prepare(statement);
     } catch (RuntimeException e) {
       throw translateException(e);

@@ -69,58 +68,53 @@ public abstract class AbstractSessionOperations {

   public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
     try {
-      log(statement, false);
       return currentSession().prepareAsync(statement);
     } catch (RuntimeException e) {
       throw translateException(e);
     }
   }

-  public ResultSet execute(Statement statement, boolean showValues) {
-    return executeAsync(statement, showValues).getUninterruptibly();
+  public ResultSet execute(Statement statement) {
+    return execute(statement, null, null);
   }

-  public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
+  public ResultSet execute(Statement statement, Stopwatch timer) {
+    return execute(statement, null, timer);
+  }
+
+  public ResultSet execute(Statement statement, UnitOfWork uow) {
+    return execute(statement, uow, null);
+  }
+
+  public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer) {
+    return executeAsync(statement, uow, timer).getUninterruptibly();
+  }
+
+  public ResultSetFuture executeAsync(Statement statement) {
+    return executeAsync(statement, null, null);
+  }
+
+  public ResultSetFuture executeAsync(Statement statement, Stopwatch timer) {
+    return executeAsync(statement, null, timer);
+  }
+
+  public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow) {
+    return executeAsync(statement, uow, null);
+  }
+
+  public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer) {
     try {
-      log(statement, showValues);
       return currentSession().executeAsync(statement);
     } catch (RuntimeException e) {
       throw translateException(e);
     }
   }

-  void log(Statement statement, boolean showValues) {
-    if (LOG.isInfoEnabled()) {
-      LOG.info("Execute statement " + statement);
-    }
-    if (isShowCql()) {
-      if (statement instanceof BuiltStatement) {
-        BuiltStatement builtStatement = (BuiltStatement) statement;
-        if (showValues) {
-          RegularStatement regularStatement = builtStatement.setForceNoValues(true);
-          printCql(regularStatement.getQueryString());
-        } else {
-          printCql(builtStatement.getQueryString());
-        }
-      } else if (statement instanceof RegularStatement) {
-        RegularStatement regularStatement = (RegularStatement) statement;
-        printCql(regularStatement.getQueryString());
-      } else {
-        printCql(statement.toString());
-      }
-    }
-  }
-
   public Tracer getZipkinTracer() {
     return null;
   }

   public MetricRegistry getMetricRegistry() {
     return null;
   }

-  public void mergeCache(Table<String, String, Object> cache) {
-  }
+  public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {}

   RuntimeException translateException(RuntimeException e) {
     if (e instanceof HelenusException) {

@@ -133,10 +127,7 @@ public abstract class AbstractSessionOperations {
     return null;
   }

-  public void updateCache(Object pojo, List<Facet> facets) {
-  }
+  public void updateCache(Object pojo, List<Facet> facets) {}

-  void printCql(String cql) {
-    getPrintStream().println(cql);
-  }
+  public void cacheEvict(List<Facet> facets) {}
 }

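Note: the per-call showValues boolean is gone from the execution path; callers now pick an overload based on whether a UnitOfWork and/or a Guava Stopwatch should account for the call, and all synchronous forms funnel into executeAsync(statement, uow, timer). A sketch of the new surface, assuming ops is any concrete AbstractSessionOperations and uow/timer come from the surrounding code:

    // Sketch only; overload names are exactly those added in the diff above.
    ResultSet r1 = ops.execute(statement);                 // no attribution
    ResultSet r2 = ops.execute(statement, timer);          // time the call
    ResultSet r3 = ops.execute(statement, uow);            // charge it to a unit of work
    ResultSet r4 = ops.execute(statement, uow, timer);     // both
    ResultSetFuture f = ops.executeAsync(statement, uow);  // async variants mirror these
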
@@ -1,253 +0,0 @@
-/*
- * Copyright (C) 2015 The Helenus Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package net.helenus.core;
-
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.diffplug.common.base.Errors;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.HashBasedTable;
-import com.google.common.collect.Table;
-import com.google.common.collect.TreeTraverser;
-
-import net.helenus.core.cache.CacheUtil;
-import net.helenus.core.cache.Facet;
-
-/** Encapsulates the concept of a "transaction" as a unit-of-work. */
-public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfWork<E>, AutoCloseable {
-
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);
-
-  private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
-  private final HelenusSession session;
-  private final AbstractUnitOfWork<E> parent;
-  // Cache:
-  private final Table<String, String, Object> cache = HashBasedTable.create();
-  private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
-  private boolean aborted = false;
-  private boolean committed = false;
-  private String purpose_;
-  private Stopwatch elapsedTime_;
-  private Stopwatch databaseTime_ = Stopwatch.createUnstarted();
-  private Stopwatch cacheLookupTime_ = Stopwatch.createUnstarted();
-
-  protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
-    Objects.requireNonNull(session, "containing session cannot be null");
-
-    this.session = session;
-    this.parent = parent;
-  }
-
-  @Override
-  public Stopwatch getExecutionTimer() {
-    return databaseTime_;
-  }
-
-  @Override
-  public Stopwatch getCacheLookupTimer() {
-    return cacheLookupTime_;
-  }
-
-  @Override
-  public void addNestedUnitOfWork(UnitOfWork<E> uow) {
-    synchronized (nested) {
-      nested.add((AbstractUnitOfWork<E>) uow);
-    }
-  }
-
-  @Override
-  public UnitOfWork<E> begin() {
-    elapsedTime_ = Stopwatch.createStarted();
-    // log.record(txn::start)
-    return this;
-  }
-
-  @Override
-  public UnitOfWork setPurpose(String purpose) {
-    purpose_ = purpose;
-    return this;
-  }
-
-  public void logTimers(String what) {
-    double e = (double) elapsedTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
-    double d = (double) databaseTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
-    double c = (double) cacheLookupTime_.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
-    double fd = (d / (e - c)) * 100.0;
-    double fc = (c / (e - d)) * 100.0;
-    LOG.info(String.format("UOW(%s)%s %s (total: %.3fms cache: %.3fms %2.2f%% db: %.3fms %2.2f%%)", hashCode(),
-        (purpose_ == null ? "" : " " + purpose_), what, e, c, fc, d, fd));
-  }
-
-  private void applyPostCommitFunctions() {
-    if (!postCommit.isEmpty()) {
-      for (CommitThunk f : postCommit) {
-        f.apply();
-      }
-    }
-    logTimers("committed");
-  }
-
-  @Override
-  public Optional<Object> cacheLookup(List<Facet> facets) {
-    String tableName = CacheUtil.schemaName(facets);
-    Optional<Object> result = Optional.empty();
-    for (Facet facet : facets) {
-      if (!facet.fixed()) {
-        String columnName = facet.name() + "==" + facet.value();
-        Object value = cache.get(tableName, columnName);
-        if (value != null) {
-          if (result.isPresent() && result.get() != value) {
-            // One facet matched, but another did not.
-            result = Optional.empty();
-            break;
-          } else {
-            result = Optional.of(value);
-          }
-        }
-      }
-    }
-    if (!result.isPresent()) {
-      // Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
-      if (parent != null) {
-        return parent.cacheLookup(facets);
-      }
-    }
-    return result;
-  }
-
-  @Override
-  public void cacheUpdate(Object value, List<Facet> facets) {
-    Facet table = facets.remove(0);
-    String tableName = table.value().toString();
-    for (Facet facet : facets) {
-      String columnName = facet.name() + "==" + facet.value();
-      cache.put(tableName, columnName, value);
-    }
-  }
-
-  private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
-    return nested.iterator();
-  }
-
-  /**
-   * Checks to see if the work performed between calling begin and now can be
-   * committed or not.
-   *
-   * @return a function from which to chain work that only happens when commit is
-   *         successful
-   * @throws E
-   *           when the work overlaps with other concurrent writers.
-   */
-  public PostCommitFunction<Void, Void> commit() throws E {
-    // All nested UnitOfWork should be committed (not aborted) before calls to
-    // commit, check.
-    boolean canCommit = true;
-    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
-    for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
-      if (this != uow) {
-        canCommit &= (!uow.aborted && uow.committed);
-      }
-    }
-
-    // log.record(txn::provisionalCommit)
-    // examine log for conflicts in read-set and write-set between begin and
-    // provisional commit
-    // if (conflict) { throw new ConflictingUnitOfWorkException(this) }
-    // else return function so as to enable commit.andThen(() -> { do something iff
-    // commit was successful; })
-
-    if (canCommit) {
-      committed = true;
-      aborted = false;
-
-      nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
-
-      // Merge UOW cache into parent's cache.
-      if (parent != null) {
-        parent.mergeCache(cache);
-      } else {
-        session.mergeCache(cache);
-      }
-      elapsedTime_.stop();
-
-      // Apply all post-commit functions for
-      if (parent == null) {
-        traverser.postOrderTraversal(this).forEach(uow -> {
-          uow.applyPostCommitFunctions();
-        });
-        return new PostCommitFunction(this, null);
-      }
-    }
-    // else {
-    // Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
-    // T object = ctor.newInstance(new Object[] { String message });
-    // }
-    return new PostCommitFunction(this, postCommit);
-  }
-
-  /* Explicitly discard the work and mark it as as such in the log. */
-  public void abort() {
-    TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
-    traverser.postOrderTraversal(this).forEach(uow -> {
-      uow.committed = false;
-      uow.aborted = true;
-    });
-    // log.record(txn::abort)
-    // cache.invalidateSince(txn::start time)
-    if (!hasAborted()) {
-      elapsedTime_.stop();
-      logTimers("aborted");
-    }
-  }
-
-  private void mergeCache(Table<String, String, Object> from) {
-    Table<String, String, Object> to = this.cache;
-    from.rowMap().forEach((rowKey, columnMap) -> {
-      columnMap.forEach((columnKey, value) -> {
-        if (to.contains(rowKey, columnKey)) {
-          to.put(rowKey, columnKey, CacheUtil.merge(to.get(rowKey, columnKey), from.get(rowKey, columnKey)));
-        } else {
-          to.put(rowKey, columnKey, from.get(rowKey, columnKey));
-        }
-      });
-    });
-  }
-
-  public String describeConflicts() {
-    return "it's complex...";
-  }
-
-  @Override
-  public void close() throws E {
-    // Closing a AbstractUnitOfWork will abort iff we've not already aborted or
-    // committed this unit of work.
-    if (aborted == false && committed == false) {
-      abort();
-    }
-  }
-
-  public boolean hasAborted() {
-    return aborted;
-  }
-
-  public boolean hasCommitted() {
-    return committed;
-  }
-}

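Note: for reference, this is how the removed class was meant to be driven according to its own comments: commit() returns a PostCommitFunction so work can be chained to run only on a successful commit, and close() aborts anything left neither committed nor aborted. A sketch reconstructed from the deleted code; the andThen chaining is what the inline comment describes, so treat it as illustrative rather than the surviving API:

    // Illustrative only: the begin/commit/abort lifecycle of the deleted class.
    try (UnitOfWork uow = session.begin()) {
      // ... reads and writes scoped to this unit of work ...
      uow.commit()
          .andThen(() -> {
            // runs only if this unit and all nested units committed
          });
    } // close() calls abort() iff neither commit() nor abort() happened
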
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,5 +17,8 @@
 package net.helenus.core;

 public enum AutoDdl {
-  VALIDATE, UPDATE, CREATE, CREATE_DROP;
+  VALIDATE,
+  UPDATE,
+  CREATE,
+  CREATE_DROP;
 }

@@ -1,6 +0,0 @@
-package net.helenus.core;
-
-@FunctionalInterface
-public interface CommitThunk {
-  void apply();
-}

@@ -1,3 +1,20 @@
+/*
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package net.helenus.core;

 public class ConflictingUnitOfWorkException extends Exception {

@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,13 +16,15 @@
  */
 package net.helenus.core;

-import java.util.Optional;
-
 import com.datastax.driver.core.Metadata;
-
+import java.util.Optional;
 import net.helenus.core.reflect.HelenusPropertyNode;

 public interface DslInstantiator {

-  <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
+  <E> E instantiate(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
+      Metadata metadata);
 }

@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,10 +16,8 @@
  */
 package net.helenus.core;

-import java.util.Objects;
-
 import com.datastax.driver.core.querybuilder.Clause;
-
+import java.util.Objects;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.MappingUtil;
 import net.helenus.mapping.value.ColumnValuePreparer;
@@ -33,14 +32,6 @@ public final class Filter<V> {
     this.postulate = postulate;
   }

-  public HelenusPropertyNode getNode() {
-    return node;
-  }
-
-  public Clause getClause(ColumnValuePreparer valuePreparer) {
-    return postulate.getClause(node, valuePreparer);
-  }
-
   public static <V> Filter<V> equal(Getter<V> getter, V val) {
     return create(getter, Operator.EQ, val);
   }
@@ -89,13 +80,21 @@ public final class Filter<V> {
     return new Filter<V>(node, postulate);
   }

+  public static <V> Filter<V> create(
+      Getter<V> getter, HelenusPropertyNode node, Postulate<V> postulate) {
+    Objects.requireNonNull(getter, "empty getter");
+    Objects.requireNonNull(postulate, "empty operator");
+    return new Filter<V>(node, postulate);
+  }
+
   public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
     Objects.requireNonNull(getter, "empty getter");
     Objects.requireNonNull(op, "empty op");
     Objects.requireNonNull(val, "empty value");

     if (op == Operator.IN) {
-      throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
+      throw new IllegalArgumentException(
+          "invalid usage of the 'in' operator, use Filter.in() static method");
     }

     HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
@@ -105,6 +104,14 @@ public final class Filter<V> {
     return new Filter<V>(node, postulate);
   }

+  public HelenusPropertyNode getNode() {
+    return node;
+  }
+
+  public Clause getClause(ColumnValuePreparer valuePreparer) {
+    return postulate.getClause(node, valuePreparer);
+  }
+
   public V[] postulateValues() {
     return postulate.values();
   }

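Note: the getNode/getClause pair simply moved below the static factories; behavior is unchanged. A short usage sketch, where the person DSL object and its getters are hypothetical and only factories visible in this diff are called:

    // Hypothetical DSL getters; Filter.equal wraps Operator.EQ via create().
    Filter<String> byName = Filter.equal(person::name, "Alice");
    Clause clause = byName.getClause(valuePreparer);  // used when building the statement

    // IN must go through its dedicated factory; create() rejects Operator.IN:
    // Filter.create(person::name, Operator.IN, "Alice");  // throws IllegalArgumentException
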
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,17 +16,15 @@
  */
 package net.helenus.core;

+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Metadata;
+import com.datastax.driver.core.Session;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Metadata;
-import com.datastax.driver.core.Session;
-
 import net.helenus.config.DefaultHelenusSettings;
 import net.helenus.config.HelenusSettings;
 import net.helenus.core.reflect.DslExportable;
@@ -35,14 +34,15 @@ import net.helenus.support.HelenusMappingException;

 public final class Helenus {

-  private static volatile HelenusSettings settings = new DefaultHelenusSettings();
-  private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
-  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
+  private static final ConcurrentMap<Class<?>, Object> dslCache =
+      new ConcurrentHashMap<Class<?>, Object>();
+  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
+      new ConcurrentHashMap<Class<?>, Metadata>();
   private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
+  private static volatile HelenusSettings settings = new DefaultHelenusSettings();
   private static volatile HelenusSession singleton;

-  private Helenus() {
-  }
+  private Helenus() {}

   protected static void setSession(HelenusSession session) {
     sessions.add(session);
@@ -54,7 +54,8 @@ public final class Helenus {
   }

   public static void shutdown() {
-    sessions.forEach((session) -> {
+    sessions.forEach(
+        (session) -> {
           session.close();
           sessions.remove(session);
         });
@@ -81,6 +82,10 @@ public final class Helenus {
     return new SessionInitializer(session);
   }

+  public static SessionInitializer init(Session session, String keyspace) {
+    return new SessionInitializer(session, keyspace);
+  }
+
   public static SessionInitializer init(Session session) {

     if (session == null) {
@@ -106,7 +111,10 @@ public final class Helenus {
     return dsl(iface, classLoader, Optional.empty(), metadata);
   }

-  public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
+  public static <E> E dsl(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
       Metadata metadata) {

     Object instance = null;

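Note: the one API addition here is init(Session, String), which selects the keyspace at initialization time instead of afterwards. A bootstrap sketch, assuming a connected driver Session; the keyspace name is a placeholder:

    // Assumes `session` is a connected com.datastax.driver.core.Session.
    SessionInitializer bootstrap = Helenus.init(session, "my_keyspace");
    // ... configure the initializer and obtain the HelenusSession from it ...

    // On shutdown, every registered HelenusSession is closed and deregistered:
    Helenus.shutdown();
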
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,23 +18,17 @@ package net.helenus.core;

 import static net.helenus.core.Query.eq;

-import java.io.Closeable;
-import java.io.PrintStream;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.util.*;
-import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
 import com.codahale.metrics.MetricRegistry;
 import com.datastax.driver.core.*;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Table;
-
 import brave.Tracer;
+import java.io.Closeable;
+import java.io.PrintStream;
+import java.util.*;
+import java.util.concurrent.Executor;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.cache.Cache;
+import javax.cache.CacheManager;
 import net.helenus.core.cache.CacheUtil;
 import net.helenus.core.cache.Facet;
 import net.helenus.core.cache.UnboundFacet;

@@ -42,65 +37,76 @@ import net.helenus.core.reflect.Drafted;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.core.reflect.MapExportable;
 import net.helenus.mapping.HelenusEntity;
 import net.helenus.mapping.HelenusProperty;
 import net.helenus.mapping.MappingUtil;
 import net.helenus.mapping.value.*;
-import net.helenus.support.DslPropertyException;
-import net.helenus.support.Fun;
+import net.helenus.support.*;
 import net.helenus.support.Fun.Tuple1;
 import net.helenus.support.Fun.Tuple2;
 import net.helenus.support.Fun.Tuple6;
-import net.helenus.support.HelenusException;
-import net.helenus.support.HelenusMappingException;

-public final class HelenusSession extends AbstractSessionOperations implements Closeable {
-
-  private final int MAX_CACHE_SIZE = 10000;
-  private final int MAX_CACHE_EXPIRE_SECONDS = 600;
+public class HelenusSession extends AbstractSessionOperations implements Closeable {
+  public static final Object deleted = new Object();

   private final Session session;
   private final CodecRegistry registry;
-  private volatile String usingKeyspace;
-  private volatile boolean showCql;
   private final ConsistencyLevel defaultConsistencyLevel;
   private final boolean defaultQueryIdempotency;
   private final MetricRegistry metricRegistry;
-  private final Tracer zipkinTracer;
   private final PrintStream printStream;
-  private final Class<? extends UnitOfWork> unitOfWorkClass;
   private final SessionRepository sessionRepository;
   private final Executor executor;
   private final boolean dropSchemaOnClose;
-  private final Cache sessionCache;
-
+  private final CacheManager cacheManager;
   private final RowColumnValueProvider valueProvider;
   private final StatementColumnValuePreparer valuePreparer;
   private final Metadata metadata;
+  private volatile String usingKeyspace;
+  private volatile boolean showCql;
+  private volatile boolean showValues;

-  HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
-      PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
-      boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, boolean defaultQueryIdempotency,
-      Class<? extends UnitOfWork> unitOfWorkClass, MetricRegistry metricRegistry, Tracer tracer) {
+  HelenusSession(
+      Session session,
+      String usingKeyspace,
+      CodecRegistry registry,
+      boolean showCql,
+      boolean showValues,
+      PrintStream printStream,
+      SessionRepositoryBuilder sessionRepositoryBuilder,
+      Executor executor,
+      boolean dropSchemaOnClose,
+      ConsistencyLevel consistencyLevel,
+      boolean defaultQueryIdempotency,
+      CacheManager cacheManager,
+      MetricRegistry metricRegistry) {
     this.session = session;
     this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
-    this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
-        "keyspace needs to be selected before creating session");
+    this.usingKeyspace =
+        Objects.requireNonNull(
+            usingKeyspace, "keyspace needs to be selected before creating session");
     this.showCql = showCql;
+    this.showValues = showValues;
     this.printStream = printStream;
-    this.sessionRepository = sessionRepositoryBuilder.build();
+    this.sessionRepository =
+        sessionRepositoryBuilder == null ? null : sessionRepositoryBuilder.build();
     this.executor = executor;
     this.dropSchemaOnClose = dropSchemaOnClose;
     this.defaultConsistencyLevel = consistencyLevel;
     this.defaultQueryIdempotency = defaultQueryIdempotency;
-    this.unitOfWorkClass = unitOfWorkClass;
     this.metricRegistry = metricRegistry;
-    this.zipkinTracer = tracer;
-
-    this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
-        .expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
+    this.cacheManager = cacheManager;

     this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
     this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
-    this.metadata = session.getCluster().getMetadata();
+    this.metadata = session == null ? null : session.getCluster().getMetadata();
   }

+  public UnitOfWork begin() {
+    return new UnitOfWork(this).begin();
+  }
+
+  public UnitOfWork begin(UnitOfWork parent) {
+    return new UnitOfWork(this, parent).begin();
+  }
+
   @Override

@@ -139,6 +145,20 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return this;
   }

+  public HelenusSession showQueryValuesInLog(boolean showValues) {
+    this.showValues = showValues;
+    return this;
+  }
+
+  public HelenusSession showQueryValuesInLog() {
+    this.showValues = true;
+    return this;
+  }
+
+  public boolean showValues() {
+    return showValues;
+  }
+
   @Override
   public Executor getExecutor() {
     return executor;

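Note: these toggles replace the old per-call showValues booleans; the setting now lives on the session itself. Sketch, using only the methods added above:

    session.showQueryValuesInLog();          // start logging bound values
    boolean on = session.showValues();       // inspect the current setting
    session.showQueryValuesInLog(false);     // stop
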
@@ -159,11 +179,6 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return valuePreparer;
   }

-  @Override
-  public Tracer getZipkinTracer() {
-    return zipkinTracer;
-  }
-
   @Override
   public MetricRegistry getMetricRegistry() {
     return metricRegistry;

@@ -181,34 +196,53 @@ public final class HelenusSession extends AbstractSessionOperations implements C

   @Override
   public Object checkCache(String tableName, List<Facet> facets) {
-    List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
     Object result = null;
-    for (String[] combination : facetCombinations) {
-      String cacheKey = tableName + "." + Arrays.toString(combination);
-      result = sessionCache.getIfPresent(cacheKey);
-      if (result != null) {
-        return result;
+    if (cacheManager != null) {
+      Cache<String, Object> cache = cacheManager.getCache(tableName);
+      if (cache != null) {
+        for (String key : CacheUtil.flatKeys(tableName, facets)) {
+          result = cache.get(key);
+          if (result != null) {
+            return result;
+          }
+        }
       }
     }
     return null;
   }

+  @Override
+  public void cacheEvict(List<Facet> facets) {
+    if (cacheManager != null) {
+      String tableName = CacheUtil.schemaName(facets);
+      Cache<String, Object> cache = cacheManager.getCache(tableName);
+      if (cache != null) {
+        CacheUtil.flatKeys(tableName, facets).forEach(key -> cache.remove(key));
+      }
+    }
+  }
+
   @Override
   public void updateCache(Object pojo, List<Facet> facets) {
-    Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
+    Map<String, Object> valueMap =
+        pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
     List<Facet> boundFacets = new ArrayList<>();
     for (Facet facet : facets) {
       if (facet instanceof UnboundFacet) {
         UnboundFacet unboundFacet = (UnboundFacet) facet;
         UnboundFacet.Binder binder = unboundFacet.binder();
-        unboundFacet.getProperties().forEach(prop -> {
+        for (HelenusProperty prop : unboundFacet.getProperties()) {
+          Object value;
           if (valueMap == null) {
-            Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
+            value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
+            if (value != null) {
               binder.setValueForProperty(prop, value.toString());
+            }
           } else {
-            binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
+            value = valueMap.get(prop.getPropertyName());
+            if (value != null) binder.setValueForProperty(prop, value.toString());
           }
-        });
+        }
         if (binder.isBound()) {
           boundFacets.add(binder.bind());
         }

@@ -218,42 +252,43 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     }
     String tableName = CacheUtil.schemaName(facets);
     List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
-    Object value = sessionCache.getIfPresent(pojo);
-    Object mergedValue = null;
-    for (String[] combination : facetCombinations) {
-      String cacheKey = tableName + "." + Arrays.toString(combination);
-      if (value == null) {
-        sessionCache.put(cacheKey, pojo);
-      } else {
-        if (mergedValue == null) {
-          mergedValue = pojo;
-        } else {
-          mergedValue = CacheUtil.merge(value, pojo);
-        }
-        sessionCache.put(mergedValue, pojo);
-      }
-    }
-
     replaceCachedFacetValues(pojo, tableName, facetCombinations);
   }

   @Override
-  public void mergeCache(Table<String, String, Object> uowCache) {
-    List<Object> pojos = uowCache.values().stream().distinct().collect(Collectors.toList());
-    for (Object pojo : pojos) {
+  public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
+    if (cacheManager != null) {
+      List<Object> items =
+          uowCache
+              .values()
+              .stream()
+              .filter(Either::isLeft)
+              .map(Either::getLeft)
+              .distinct()
+              .collect(Collectors.toList());
+      for (Object pojo : items) {
         HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
-        Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
+        Map<String, Object> valueMap =
+            pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
         if (entity.isCacheable()) {
           List<Facet> boundFacets = new ArrayList<>();
           for (Facet facet : entity.getFacets()) {
             if (facet instanceof UnboundFacet) {
               UnboundFacet unboundFacet = (UnboundFacet) facet;
               UnboundFacet.Binder binder = unboundFacet.binder();
-              unboundFacet.getProperties().forEach(prop -> {
+              unboundFacet
+                  .getProperties()
+                  .forEach(
+                      prop -> {
                         if (valueMap == null) {
-                          Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
+                          Object value =
+                              BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
                           binder.setValueForProperty(prop, value.toString());
                         } else {
-                          binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
+                          Object v = valueMap.get(prop.getPropertyName());
+                          if (v != null) {
+                            binder.setValueForProperty(prop, v.toString());
+                          }
                         }
                       });
               if (binder.isBound()) {

@@ -263,59 +298,66 @@ public final class HelenusSession extends AbstractSessionOperations implements C
           boundFacets.add(facet);
         }
       }
-      String tableName = entity.getName().toCql();
-      // NOTE: should equal `String tableName = CacheUtil.schemaName(facets);`
       List<String[]> facetCombinations = CacheUtil.flattenFacets(boundFacets);
-      Object value = sessionCache.getIfPresent(pojo);
-      Object mergedValue = null;
+      String tableName = CacheUtil.schemaName(boundFacets);
       replaceCachedFacetValues(pojo, tableName, facetCombinations);
     }
   }
+
+      List<List<Facet>> deletedFacetSets =
+          uowCache
+              .values()
+              .stream()
+              .filter(Either::isRight)
+              .map(Either::getRight)
+              .collect(Collectors.toList());
+      for (List<Facet> facets : deletedFacetSets) {
+        String tableName = CacheUtil.schemaName(facets);
+        Cache<String, Object> cache = cacheManager.getCache(tableName);
+        if (cache != null) {
+          List<String> keys = CacheUtil.flatKeys(tableName, facets);
+          keys.forEach(key -> cache.remove(key));
+        }
+      }
+    }
+  }

   private void replaceCachedFacetValues(
       Object pojo, String tableName, List<String[]> facetCombinations) {
+    if (cacheManager != null) {
       for (String[] combination : facetCombinations) {
         String cacheKey = tableName + "." + Arrays.toString(combination);
-        if (value == null) {
-          sessionCache.put(cacheKey, pojo);
+        Cache<String, Object> cache = cacheManager.getCache(tableName);
+        if (cache != null) {
+          if (pojo == null || pojo == HelenusSession.deleted) {
+            cache.remove(cacheKey);
           } else {
-          if (mergedValue == null) {
-            mergedValue = pojo;
-          } else {
-            mergedValue = CacheUtil.merge(value, pojo);
-          }
-          sessionCache.put(mergedValue, pojo);
+            cache.put(cacheKey, pojo);
+          }
         }
       }
+    }
   }

+  public CacheManager getCacheManager() {
+    return cacheManager;
+  }
+
   public Metadata getMetadata() {
     return metadata;
   }

-  public synchronized UnitOfWork begin() {
-    return begin(null);
-  }
-
-  public synchronized UnitOfWork begin(UnitOfWork parent) {
-    try {
-      Class<? extends UnitOfWork> clazz = unitOfWorkClass;
-      Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
-      UnitOfWork uow = ctor.newInstance(this, parent);
-      if (parent != null) {
-        parent.addNestedUnitOfWork(uow);
-      }
-      return uow.begin();
-    } catch (NoSuchMethodException | InvocationTargetException | InstantiationException
-        | IllegalAccessException e) {
-      throw new HelenusException(
-          String.format("Unable to instantiate {} as a UnitOfWork.", unitOfWorkClass.getSimpleName()), e);
-    }
-  }
-
   public <E> SelectOperation<E> select(E pojo) {
-    Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
+    Objects.requireNonNull(
+        pojo, "supplied object must be a dsl for a registered entity but cannot be null");
     ColumnValueProvider valueProvider = getValueProvider();
     HelenusEntity entity = Helenus.resolve(pojo);
     Class<?> entityClass = entity.getMappingInterface();

-    return new SelectOperation<E>(this, entity, (r) -> {
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
           Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
           return (E) Helenus.map(entityClass, map);
         });

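Note: with the Guava sessionCache gone, read-through caching rides on a JCache CacheManager whose cache names are the schema/table names. A usage sketch of the surface wired above; the "person" table name and the facets list are placeholders:

    // Methods are the overrides shown in this diff.
    Object hit = session.checkCache("person", facets);  // first matching flat key wins
    if (hit == null) {
      // ... read from Cassandra, then publish for the next caller:
      // session.updateCache(pojo, facets);
    }
    session.cacheEvict(facets);  // drops every flat key derived from the facet set
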
@@ -326,7 +368,10 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     ColumnValueProvider valueProvider = getValueProvider();
     HelenusEntity entity = Helenus.entity(entityClass);

-    return new SelectOperation<E>(this, entity, (r) -> {
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
           Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
           return (E) Helenus.map(entityClass, map);
         });

@@ -336,13 +381,22 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return new SelectOperation<Fun.ArrayTuple>(this);
   }

-  public SelectOperation<Row> selectAll(Class<?> entityClass) {
+  public <E> SelectOperation<E> selectAll(Class<E> entityClass) {
     Objects.requireNonNull(entityClass, "entityClass is empty");
-    return new SelectOperation<Row>(this, Helenus.entity(entityClass));
+    HelenusEntity entity = Helenus.entity(entityClass);
+
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
+          Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
+          return (E) Helenus.map(entityClass, map);
+        });
   }

   public <E> SelectOperation<Row> selectAll(E pojo) {
-    Objects.requireNonNull(pojo, "supplied object must be a dsl for a registered entity but cannot be null");
+    Objects.requireNonNull(
+        pojo, "supplied object must be a dsl for a registered entity but cannot be null");
     HelenusEntity entity = Helenus.resolve(pojo);
     return new SelectOperation<Row>(this, entity);
   }

@@ -357,7 +411,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     Objects.requireNonNull(getter1, "field 1 is empty");

     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
-    return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
+    return new SelectOperation<Tuple1<V1>>(
+        this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
   }

   public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {

@@ -366,12 +421,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C

     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
-    return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
-        p1, p2);
+    return new SelectOperation<Fun.Tuple2<V1, V2>>(
+        this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
   }

-  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3) {
+  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -379,12 +434,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
-    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
-        new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
+    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(
+        this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
   }

-  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3, Getter<V4> getter4) {
+  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -394,12 +449,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
-    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
-        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
+    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(
+        this,
+        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4),
+        p1,
+        p2,
+        p3,
+        p4);
   }

-  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
-      Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
+  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
+      Getter<V1> getter1,
+      Getter<V2> getter2,
+      Getter<V3> getter3,
+      Getter<V4> getter4,
+      Getter<V5> getter5) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -411,12 +475,23 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
-    return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
-        new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
+    return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(
+        this,
+        new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5);
   }

-  public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1,
-      Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
+  public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
+      Getter<V1> getter1,
+      Getter<V2> getter2,
+      Getter<V3> getter3,
+      Getter<V4> getter4,
+      Getter<V5> getter5,
+      Getter<V6> getter6) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -430,14 +505,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
     HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
-    return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
-        new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4,
-        p5, p6);
+    return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(
+        this,
+        new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5,
+        p6);
   }

-  public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
-      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5,
-      Getter<V6> getter6, Getter<V7> getter7) {
+  public <V1, V2, V3, V4, V5, V6, V7>
+      SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
+          Getter<V1> getter1,
+          Getter<V2> getter2,
+          Getter<V3> getter3,
+          Getter<V4> getter4,
+          Getter<V5> getter5,
+          Getter<V6> getter6,
+          Getter<V7> getter7) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");

@@ -453,9 +540,17 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
     HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
     HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
-    return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
-        new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2,
-        p3, p4, p5, p6, p7);
+    return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(
+        this,
+        new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
+            getValueProvider(), p1, p2, p3, p4, p5, p6, p7),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5,
+        p6,
+        p7);
   }

   public CountOperation count() {

@@ -471,6 +566,14 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return new UpdateOperation<ResultSet>(this);
   }

+  public <E> UpdateOperation<E> update(Object pojo) {
+    if (pojo instanceof MapExportable == false) {
+      throw new HelenusMappingException(
+          "update of objects that don't implement MapExportable is not yet supported");
+    }
+    return new UpdateOperation<E>(this, pojo);
+  }
+
   public <E> UpdateOperation<E> update(Drafted<E> drafted) {
     if (drafted instanceof AbstractEntityDraft == false) {
       throw new HelenusMappingException(

@@ -483,19 +586,24 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusEntity entity = Helenus.entity(draft.getEntityClass());

     // Add all the mutated values contained in the draft.
-    entity.getOrderedProperties().forEach(property -> {
+    entity
+        .getOrderedProperties()
+        .forEach(
+            property -> {
               switch (property.getColumnType()) {
-                case PARTITION_KEY :
-                case CLUSTERING_COLUMN :
+                case PARTITION_KEY:
+                case CLUSTERING_COLUMN:
                   break;
-                default :
+                default:
                   String propertyName = property.getPropertyName();
                   if (mutatedProperties.contains(propertyName)) {
                     Object value = map.get(propertyName);
-                    Getter<Object> getter = new Getter<Object>() {
+                    Getter<Object> getter =
+                        new Getter<Object>() {
                           @Override
                           public Object get() {
-                            throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
+                            throw new DslPropertyException(
+                                new HelenusPropertyNode(property, Optional.empty()));
                           }
                         };
                     update.set(getter, value);

@@ -505,16 +613,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C

     // Add the partition and clustering keys if they were in the draft (normally the
     // case).
-    entity.getOrderedProperties().forEach(property -> {
+    entity
+        .getOrderedProperties()
+        .forEach(
+            property -> {
               switch (property.getColumnType()) {
-                case PARTITION_KEY :
-                case CLUSTERING_COLUMN :
+                case PARTITION_KEY:
+                case CLUSTERING_COLUMN:
                   String propertyName = property.getPropertyName();
                   Object value = map.get(propertyName);
-                  Getter<Object> getter = new Getter<Object>() {
+                  Getter<Object> getter =
+                      new Getter<Object>() {
                         @Override
                         public Object get() {
-                          throw new DslPropertyException(new HelenusPropertyNode(property, Optional.empty()));
+                          throw new DslPropertyException(
+                              new HelenusPropertyNode(property, Optional.empty()));
                         }
                       };
                   update.where(getter, eq(value));

@@ -542,7 +655,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   }

   public <T> InsertOperation<T> insert(T pojo) {
-    Objects.requireNonNull(pojo,
+    Objects.requireNonNull(
+        pojo,
         "supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
     HelenusEntity entity = null;
     try {

@ -550,23 +664,23 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
} catch (HelenusMappingException e) {
|
||||
}
|
||||
if (entity != null) {
|
||||
return new InsertOperation<T>(this, entity.getMappingInterface(), true);
|
||||
return new InsertOperation<T>(this, entity, entity.getMappingInterface(), true);
|
||||
} else {
|
||||
return this.<T>insert(pojo, null);
|
||||
return this.<T>insert(pojo, null, null);
|
||||
}
|
||||
}
|
||||
|
||||
public <T> InsertOperation<T> insert(Drafted draft) {
|
||||
return insert(draft.build(), draft.mutated());
|
||||
return insert(draft.build(), draft.mutated(), draft.read());
|
||||
}
|
||||
|
||||
private <T> InsertOperation<T> insert(T pojo, Set<String> mutations) {
|
||||
private <T> InsertOperation<T> insert(T pojo, Set<String> mutations, Set<String> read) {
|
||||
Objects.requireNonNull(pojo, "pojo is empty");
|
||||
|
||||
Class<?> iface = MappingUtil.getMappingInterface(pojo);
|
||||
HelenusEntity entity = Helenus.entity(iface);
|
||||
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, true);
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, read, true);
|
||||
}
|
||||
|
||||
public InsertOperation<ResultSet> upsert() {
|
||||
|
@ -578,11 +692,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
}
|
||||
|
||||
public <T> InsertOperation<T> upsert(Drafted draft) {
|
||||
return this.<T>upsert((T) draft.build(), draft.mutated());
|
||||
return this.<T>upsert((T) draft.build(), draft.mutated(), draft.read());
|
||||
}
|
||||
|
||||
public <T> InsertOperation<T> upsert(T pojo) {
|
||||
Objects.requireNonNull(pojo,
|
||||
Objects.requireNonNull(
|
||||
pojo,
|
||||
"supplied object must be either an instance of the entity class or a dsl for it, but cannot be null");
|
||||
HelenusEntity entity = null;
|
||||
try {
|
||||
|
@ -590,19 +705,19 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
} catch (HelenusMappingException e) {
|
||||
}
|
||||
if (entity != null) {
|
||||
return new InsertOperation<T>(this, entity.getMappingInterface(), false);
|
||||
return new InsertOperation<T>(this, entity, entity.getMappingInterface(), false);
|
||||
} else {
|
||||
return this.<T>upsert(pojo, null);
|
||||
return this.<T>upsert(pojo, null, null);
|
||||
}
|
||||
}
|
||||
|
||||
private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations) {
|
||||
private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations, Set<String> read) {
|
||||
Objects.requireNonNull(pojo, "pojo is empty");
|
||||
|
||||
Class<?> iface = MappingUtil.getMappingInterface(pojo);
|
||||
HelenusEntity entity = Helenus.entity(iface);
|
||||
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, false);
|
||||
return new InsertOperation<T>(this, entity, pojo, mutations, read, false);
|
||||
}
|
||||
|
||||
public DeleteOperation delete() {
|
||||
|
@ -623,6 +738,9 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
}
|
||||
|
||||
public void close() {
|
||||
if (session == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (session.isClosed()) {
|
||||
return;
|
||||
|
@ -652,13 +770,16 @@ public final class HelenusSession extends AbstractSessionOperations implements C
|
|||
private void dropEntity(HelenusEntity entity) {
|
||||
|
||||
switch (entity.getType()) {
|
||||
case TABLE :
|
||||
execute(SchemaUtil.dropTable(entity), true);
|
||||
case TABLE:
|
||||
execute(SchemaUtil.dropTable(entity));
|
||||
break;
|
||||
|
||||
case UDT :
|
||||
execute(SchemaUtil.dropUserType(entity), true);
|
||||
case UDT:
|
||||
execute(SchemaUtil.dropUserType(entity));
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new HelenusException("Unknown entity type.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
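The insert/upsert overloads above now thread a draft's read set through to InsertOperation alongside its mutation set. A minimal caller-side sketch of the new Drafted path, assuming a hypothetical Account entity draft and the usual sync() terminal call on the returned operation:

    // Hypothetical entity draft; Account and its accessors are illustrative, not part of this diff.
    AccountDraft draft = Account.draft("alice");
    draft.setBalance(100L);                 // recorded in draft.mutated()
    // draft.build() materializes the pojo; draft.read() reports which columns were
    // observed while building the draft, which the session now passes to InsertOperation.
    session.insert(draft).sync();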
net/helenus/core/HelenusValidator.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,9 +17,7 @@
package net.helenus.core;

import java.lang.annotation.Annotation;

import javax.validation.ConstraintValidator;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
@@ -37,7 +36,8 @@ public enum HelenusValidator implements PropertyValueValidator {
    try {
      valid = typeless.isValid(value, null);
    } catch (ClassCastException e) {
      throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
      throw new HelenusMappingException(
          "validator was used for wrong type '" + value + "' in " + prop, e);
    }

    if (!valid) {
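For context, the guard above exists because the ConstraintValidator is invoked through an untyped reference, so a type mismatch only surfaces at runtime. A sketch of the failure mode it wraps, with hypothetical validator names:

    // SomeLengthValidator is illustrative; it validates Strings.
    ConstraintValidator<? extends Annotation, ?> raw = new SomeLengthValidator();
    @SuppressWarnings("unchecked")
    ConstraintValidator<? extends Annotation, Object> typeless =
        (ConstraintValidator<? extends Annotation, Object>) raw;
    typeless.isValid(42, null); // ClassCastException -> rethrown as HelenusMappingException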
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
net/helenus/core/Mappers.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,10 +16,8 @@
 */
package net.helenus.core;

import java.util.function.Function;

import com.datastax.driver.core.Row;

import java.util.function.Function;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
@@ -26,8 +25,7 @@ import net.helenus.support.Fun;

public final class Mappers {

  private Mappers() {
  }
  private Mappers() {}

  public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

@@ -59,7 +57,8 @@ public final class Mappers {

    @Override
    public Fun.Tuple2<A, B> apply(Row row) {
      return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
      return new Fun.Tuple2<A, B>(
          provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
    }
  }

@@ -70,7 +69,10 @@ public final class Mappers {
    private final HelenusProperty p2;
    private final HelenusProperty p3;

    public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
    public Mapper3(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3) {
      this.provider = provider;
      this.p1 = p1.getProperty();
@@ -80,7 +82,9 @@ public final class Mappers {

    @Override
    public Fun.Tuple3<A, B, C> apply(Row row) {
      return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
      return new Fun.Tuple3<A, B, C>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3));
    }
  }
@@ -93,8 +97,12 @@ public final class Mappers {
    private final HelenusProperty p3;
    private final HelenusProperty p4;

    public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4) {
    public Mapper4(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -104,18 +112,27 @@ public final class Mappers {

    @Override
    public Fun.Tuple4<A, B, C, D> apply(Row row) {
      return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
      return new Fun.Tuple4<A, B, C, D>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4));
    }
  }

  public static final class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
  public static final class Mapper5<A, B, C, D, E>
      implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5;

    public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
    public Mapper5(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -126,19 +143,29 @@ public final class Mappers {

    @Override
    public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
      return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
      return new Fun.Tuple5<A, B, C, D, E>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5));
    }
  }

  public static final class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
  public static final class Mapper6<A, B, C, D, E, F>
      implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6;

    public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
    public Mapper6(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5,
        HelenusPropertyNode p6) {
      this.provider = provider;
      this.p1 = p1.getProperty();
      this.p2 = p2.getProperty();
@@ -150,20 +177,30 @@ public final class Mappers {

    @Override
    public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
      return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
      return new Fun.Tuple6<A, B, C, D, E, F>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6));
    }
  }

  public static final class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
  public static final class Mapper7<A, B, C, D, E, F, G>
      implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

    private final ColumnValueProvider provider;
    private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

    public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
    public Mapper7(
        ColumnValueProvider provider,
        HelenusPropertyNode p1,
        HelenusPropertyNode p2,
        HelenusPropertyNode p3,
        HelenusPropertyNode p4,
        HelenusPropertyNode p5,
        HelenusPropertyNode p6,
        HelenusPropertyNode p7) {
      this.provider = provider;
      this.p1 = p1.getProperty();
@@ -177,10 +214,14 @@ public final class Mappers {

    @Override
    public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
      return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
      return new Fun.Tuple7<A, B, C, D, E, F, G>(
          provider.getColumnValue(row, 0, p1),
          provider.getColumnValue(row, 1, p2),
          provider.getColumnValue(row, 2, p3),
          provider.getColumnValue(row, 3, p4),
          provider.getColumnValue(row, 4, p5),
          provider.getColumnValue(row, 5, p6),
          provider.getColumnValue(row, 6, p7));
    }
  }
}
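Every MapperN in this file is just a Function<Row, Fun.TupleN> that pulls column i into tuple slot i via the ColumnValueProvider. Conceptually the two-column case reduces to the sketch below; provider, nameProp, and balanceProp are assumed to be in scope and are illustrative only:

    // Column 0 feeds the first tuple element, column 1 the second; the provider
    // handles codec lookup and null handling for each HelenusProperty.
    Function<Row, Fun.Tuple2<String, Long>> mapper =
        row -> new Fun.Tuple2<>(
            provider.getColumnValue(row, 0, nameProp),
            provider.getColumnValue(row, 1, balanceProp));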
net/helenus/core/Operator.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -31,8 +32,6 @@ public enum Operator {

  LTE("<=");

  private final String name;

  private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();

  static {
@@ -41,15 +40,17 @@ public enum Operator {
    }
  }

  private final String name;

  private Operator(String name) {
    this.name = name;
  }

  public String getName() {
    return name;
  }

  public static Operator findByOperator(String name) {
    return indexByName.get(name);
  }

  public String getName() {
    return name;
  }
}
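The change above only moves the name field and getName() next to the constructor; the lookup table is untouched, so behavior is unchanged. For example:

    // LTE is declared as LTE("<="), so the reverse index resolves it by symbol.
    Operator op = Operator.findByOperator("<=");
    assert op == Operator.LTE;
    assert "<=".equals(op.getName());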
net/helenus/core/Ordered.java
@@ -1,10 +1,8 @@
package net.helenus.core;

import java.util.Objects;

import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import java.util.Objects;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;
@@ -34,10 +32,10 @@ public final class Ordered {
    }

    switch (direction) {
      case ASC :
      case ASC:
        return QueryBuilder.asc(propNode.getColumnName());

      case DESC :
      case DESC:
        return QueryBuilder.desc(propNode.getColumnName());
    }
net/helenus/core/PostCommitFunction.java
@@ -2,24 +2,73 @@ package net.helenus.core;

import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;

import net.helenus.support.CheckedRunnable;

public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {
  public static final PostCommitFunction<Void, Void> NULL_ABORT = new PostCommitFunction<Void, Void>(null, null, null, false);
  public static final PostCommitFunction<Void, Void> NULL_COMMIT = new PostCommitFunction<Void, Void>(null, null, null, true);

  private final UnitOfWork uow;
  private final List<CommitThunk> postCommit;
  private final List<CheckedRunnable> commitThunks;
  private final List<CheckedRunnable> abortThunks;
  private Consumer<? super Throwable> exceptionallyThunk;
  private boolean committed;

  PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) {
    this.uow = uow;
    this.postCommit = postCommit;
  PostCommitFunction(List<CheckedRunnable> postCommit, List<CheckedRunnable> abortThunks,
      Consumer<? super Throwable> exceptionallyThunk,
      boolean committed) {
    this.commitThunks = postCommit;
    this.abortThunks = abortThunks;
    this.exceptionallyThunk = exceptionallyThunk;
    this.committed = committed;
  }

  public void andThen(CommitThunk after) {
  private void apply(CheckedRunnable... fns) {
    try {
      for (CheckedRunnable fn : fns) {
        fn.run();
      }
    } catch (Throwable t) {
      if (exceptionallyThunk != null) {
        exceptionallyThunk.accept(t);
      }
    }
  }

  public PostCommitFunction<T, R> andThen(CheckedRunnable... after) {
    Objects.requireNonNull(after);
    if (postCommit == null) {
      after.apply();
    } else {
      postCommit.add(after);
    if (commitThunks == null) {
      if (committed) {
        apply(after);
      }
    } else {
      for (CheckedRunnable fn : after) {
        commitThunks.add(fn);
      }
    }
    return this;
  }

  public PostCommitFunction<T, R> orElse(CheckedRunnable... after) {
    Objects.requireNonNull(after);
    if (abortThunks == null) {
      if (!committed) {
        apply(after);
      }
    } else {
      for (CheckedRunnable fn : after) {
        abortThunks.add(fn);
      }
    }
    return this;
  }

  public PostCommitFunction<T, R> exceptionally(Consumer<? super Throwable> fn) {
    Objects.requireNonNull(fn);
    exceptionallyThunk = fn;
    return this;
  }

  @Override
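The reworked PostCommitFunction turns the commit result into a small fluent API: andThen thunks run only if the unit of work committed, orElse thunks only if it aborted, and exceptionally installs a handler for thunks that throw (when no thunk list is pending, NULL_COMMIT/NULL_ABORT apply the callbacks immediately). A usage sketch, assuming UnitOfWork.commit() returns a PostCommitFunction as those constants suggest, with log as a hypothetical logger:

    uow.commit()
        .andThen(() -> log.info("committed"))            // runs iff the unit of work committed
        .orElse(() -> log.info("aborted"))               // runs iff it aborted
        .exceptionally(t -> log.error("thunk failed", t)); // invoked if a thunk throws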
net/helenus/core/Postulate.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -17,7 +18,6 @@ package net.helenus.core;

import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;
@@ -39,34 +39,34 @@ public final class Postulate<V> {
  public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {

    switch (operator) {
      case EQ :
        return QueryBuilder.eq(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
      case EQ:
        return QueryBuilder.eq(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case IN :
      case IN:
        Object[] preparedValues = new Object[values.length];
        for (int i = 0; i != values.length; ++i) {
          preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
        }
        return QueryBuilder.in(node.getColumnName(), preparedValues);

      case LT :
        return QueryBuilder.lt(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
      case LT:
        return QueryBuilder.lt(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case LTE :
        return QueryBuilder.lte(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
      case LTE:
        return QueryBuilder.lte(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GT :
        return QueryBuilder.gt(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
      case GT:
        return QueryBuilder.gt(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      case GTE :
        return QueryBuilder.gte(node.getColumnName(),
            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
      case GTE:
        return QueryBuilder.gte(
            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

      default :
      default:
        throw new HelenusMappingException("unknown filter operation " + operator);
    }
  }
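getClause is a one-to-one mapping from the DSL operator onto the driver's QueryBuilder clauses, with the value prepared by the ColumnValuePreparer first. For instance, an EQ or IN postulate on a column named "id" reduces to (a sketch; preparedValue/preparedValues stand in for the preparer's output):

    Clause eq = QueryBuilder.eq("id", preparedValue);    // EQ
    Clause in = QueryBuilder.in("id", preparedValues);   // IN; LT/LTE/GT/GTE map the same way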
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
net/helenus/core/Query.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,20 +16,17 @@
 */
package net.helenus.core;

import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import net.helenus.mapping.OrderingDirection;

/** Sugar methods for the queries */
public final class Query {

  private Query() {
  }
  private Query() {}

  public static BindMarker marker() {
    return QueryBuilder.bindMarker();
net/helenus/core/SchemaUtil.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,16 +16,14 @@
 */
package net.helenus.core;

import java.util.*;
import java.util.stream.Collectors;

import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.IsNotNullClause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;

import java.util.*;
import java.util.stream.Collectors;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
@@ -35,8 +34,7 @@ import net.helenus.support.HelenusMappingException;

public final class SchemaUtil {

  private SchemaUtil() {
  }
  private SchemaUtil() {}

  public static RegularStatement use(String keyspace, boolean forceQuote) {
    if (forceQuote) {
@@ -59,23 +57,31 @@ public final class SchemaUtil {
    ColumnType columnType = prop.getColumnType();

    if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
      throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
          + prop.getPropertyName() + " in entity " + entity);
      throw new HelenusMappingException(
          "primary key columns are not supported in UserDefinedType for "
              + prop.getPropertyName()
              + " in entity "
              + entity);
    }

    try {
      prop.getDataType().addColumn(create, prop.getColumnName());
    } catch (IllegalArgumentException e) {
      throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
          + entity.getName().getName() + "'", e);
      throw new HelenusMappingException(
          "invalid column name '"
              + prop.getColumnName()
              + "' in entity '"
              + entity.getName().getName()
              + "'",
          e);
    }
  }

  return create;
}

public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
    boolean dropUnusedColumns) {
public static List<SchemaStatement> alterUserType(
    UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {

  if (entity.getType() != HelenusEntityType.UDT) {
    throw new HelenusMappingException("expected UDT entity " + entity);
@@ -84,12 +90,13 @@ public final class SchemaUtil {
  List<SchemaStatement> result = new ArrayList<SchemaStatement>();

  /**
   * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
   * when it will exist
   * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
   * exist
   */
  Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

  final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
  final Set<String> visitedColumns =
      dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

  for (HelenusProperty prop : entity.getOrderedProperties()) {

@@ -106,8 +113,9 @@ public final class SchemaUtil {
    }

    DataType dataType = userType.getFieldType(columnName);
    SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
        optional(columnName, dataType));
    SchemaStatement stmt =
        prop.getDataType()
            .alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));

    if (stmt != null) {
      result.add(stmt);
@@ -147,32 +155,44 @@ public final class SchemaUtil {
  for (HelenusProperty prop : properties) {
    String columnName = prop.getColumnName().toCql();
    switch (prop.getColumnType()) {
      case PARTITION_KEY :
      case PARTITION_KEY:
        p.add(columnName);
        break;
      case CLUSTERING_COLUMN :
      case CLUSTERING_COLUMN:
        c.add(columnName);
        break;
      default :
      default:
        break;
    }
  }

  return "(" + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
      + ((c.size() > 0) ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0)) : "") + ")";
  if (p.size() == 0 && c.size() == 0)
    return "{"
        + properties
            .stream()
            .map(HelenusProperty::getPropertyName)
            .collect(Collectors.joining(", "))
        + "}";

  return "("
      + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
      + ((c.size() > 0)
          ? ", " + ((c.size() > 1) ? "(" + String.join(", ", c) + ")" : c.get(0))
          : "")
      + ")";
}

public static SchemaStatement createMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
public static SchemaStatement createMaterializedView(
    String keyspace, String viewName, HelenusEntity entity) {
  if (entity.getType() != HelenusEntityType.VIEW) {
    throw new HelenusMappingException("expected view entity " + entity);
  }

  if (entity == null) {
    throw new HelenusMappingException("no entity or table to select data");
  }

  List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
  entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
  entity
      .getOrderedProperties()
      .stream()
      .map(p -> new HelenusPropertyNode(p, Optional.empty()))
      .forEach(p -> props.add(p));

  Select.Selection selection = QueryBuilder.select();
@@ -189,20 +209,20 @@ public final class SchemaUtil {
  for (HelenusPropertyNode prop : props) {
    String columnName = prop.getColumnName();
    switch (prop.getProperty().getColumnType()) {
      case PARTITION_KEY :
      case PARTITION_KEY:
        where = where.and(new IsNotNullClause(columnName));
        break;

      case CLUSTERING_COLUMN :
      case CLUSTERING_COLUMN:
        where = where.and(new IsNotNullClause(columnName));

        ClusteringColumn clusteringColumn = prop.getProperty().getGetterMethod()
            .getAnnotation(ClusteringColumn.class);
        ClusteringColumn clusteringColumn =
            prop.getProperty().getGetterMethod().getAnnotation(ClusteringColumn.class);
        if (clusteringColumn != null && clusteringColumn.ordering() != null) {
          o.add(columnName + " " + clusteringColumn.ordering().cql());
        }
        break;
      default :
      default:
        break;
    }
  }
@@ -213,10 +233,12 @@ public final class SchemaUtil {
  if (o.size() > 0) {
    clustering = "WITH CLUSTERING ORDER BY (" + String.join(", ", o) + ")";
  }
  return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering).ifNotExists();
  return new CreateMaterializedView(keyspace, viewName, where, primaryKey, clustering)
      .ifNotExists();
}

public static SchemaStatement dropMaterializedView(String keyspace, String viewName, HelenusEntity entity) {
public static SchemaStatement dropMaterializedView(
    String keyspace, String viewName, HelenusEntity entity) {
  return new DropMaterializedView(keyspace, viewName);
}

@@ -249,14 +271,15 @@ public final class SchemaUtil {

  if (!clusteringColumns.isEmpty()) {
    Options options = create.withOptions();
    clusteringColumns
        .forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
    clusteringColumns.forEach(
        p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
  }

  return create;
}

public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
public static List<SchemaStatement> alterTable(
    TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {

  if (entity.getType() != HelenusEntityType.TABLE) {
    throw new HelenusMappingException("expected table entity " + entity);
@@ -266,7 +289,8 @@ public final class SchemaUtil {

  Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

  final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
  final Set<String> visitedColumns =
      dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

  for (HelenusProperty prop : entity.getOrderedProperties()) {

@@ -283,8 +307,8 @@ public final class SchemaUtil {
    }

    ColumnMetadata columnMetadata = tmd.getColumn(columnName);
    SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
        optional(columnMetadata));
    SchemaStatement stmt =
        prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));

    if (stmt != null) {
      result.add(stmt);
@@ -314,28 +338,42 @@ public final class SchemaUtil {

public static SchemaStatement createIndex(HelenusProperty prop) {
  if (prop.caseSensitiveIndex()) {
    return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()).ifNotExists()
        .onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
    return SchemaBuilder.createIndex(indexName(prop))
        .ifNotExists()
        .onTable(prop.getEntity().getName().toCql())
        .andColumn(prop.getColumnName().toCql());
  } else {
    return new CreateSasiIndex(prop.getIndexName().get().toCql()).ifNotExists()
        .onTable(prop.getEntity().getName().toCql()).andColumn(prop.getColumnName().toCql());
    return new CreateSasiIndex(prop.getIndexName().get().toCql())
        .ifNotExists()
        .onTable(prop.getEntity().getName().toCql())
        .andColumn(prop.getColumnName().toCql());
  }
}

public static List<SchemaStatement> createIndexes(HelenusEntity entity) {

  return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
      .map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
  return entity
      .getOrderedProperties()
      .stream()
      .filter(p -> p.getIndexName().isPresent())
      .map(p -> SchemaUtil.createIndex(p))
      .collect(Collectors.toList());
}

public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
    boolean dropUnusedIndexes) {
public static List<SchemaStatement> alterIndexes(
    TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {

  List<SchemaStatement> list = new ArrayList<SchemaStatement>();

  final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
  final Set<String> visitedColumns =
      dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();

  entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {
  entity
      .getOrderedProperties()
      .stream()
      .filter(p -> p.getIndexName().isPresent())
      .forEach(
          p -> {
            String columnName = p.getColumnName().getName();

            if (dropUnusedIndexes) {
@@ -356,9 +394,11 @@ public final class SchemaUtil {

  if (dropUnusedIndexes) {

    tmd.getColumns().stream()
    tmd.getColumns()
        .stream()
        .filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
        .forEach(c -> {
        .forEach(
            c -> {
              list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
            });
  }
@@ -367,14 +407,14 @@ public final class SchemaUtil {
}

public static SchemaStatement dropIndex(HelenusProperty prop) {
  return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
  return SchemaBuilder.dropIndex(indexName(prop)).ifExists();
}

private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
  switch (o) {
    case ASC :
    case ASC:
      return SchemaBuilder.Direction.ASC;
    case DESC :
    case DESC:
      return SchemaBuilder.Direction.DESC;
  }
  throw new HelenusMappingException("unknown ordering " + o);
@@ -384,7 +424,10 @@ public final class SchemaUtil {

  throw new HelenusMappingException(
      "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
          + prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
          + prop.getPropertyName()
          + "' type is '"
          + prop.getJavaType()
          + "' in the entity "
          + prop.getEntity());
}

@@ -423,4 +466,9 @@ public final class SchemaUtil {
  }
  return null;
}

private static String indexName(HelenusProperty prop) {
  return prop.getEntity().getName().toCql() + "_" + prop.getIndexName().get().toCql();
}

}
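The new private indexName helper prefixes an index with its table name, and createIndex/dropIndex now agree on that name, which keeps the CREATE/DROP pair symmetric. Concretely, for a hypothetical users table whose email column carries an index named by_email, the generated CQL would be roughly:

    CREATE INDEX IF NOT EXISTS users_by_email ON users (email);
    DROP INDEX IF EXISTS users_by_email;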
net/helenus/core/SessionInitializer.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,18 +16,16 @@
 */
package net.helenus.core;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.io.PrintStream;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;

import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;

import brave.Tracer;
import javax.cache.CacheManager;
import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
@@ -40,26 +39,30 @@ import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations {

  private final Session session;
  private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
  private CodecRegistry registry;
  private String usingKeyspace;
  private boolean showCql = false;
  private boolean showValues = true;
  private ConsistencyLevel consistencyLevel;
  private boolean idempotent = true;
  private boolean idempotent = false;
  private MetricRegistry metricRegistry = new MetricRegistry();
  private Tracer zipkinTracer;
  private PrintStream printStream = System.out;
  private Executor executor = MoreExecutors.directExecutor();
  private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;

  private SessionRepositoryBuilder sessionRepository;

  private boolean dropUnusedColumns = false;
  private boolean dropUnusedIndexes = false;

  private KeyspaceMetadata keyspaceMetadata;

  private final List<Either<Object, Class<?>>> initList = new ArrayList<Either<Object, Class<?>>>();
  private AutoDdl autoDdl = AutoDdl.UPDATE;
  private CacheManager cacheManager = null;

  SessionInitializer(Session session, String keyspace) {
    this.session = session;
    this.usingKeyspace = keyspace;
    if (session != null) {
      this.sessionRepository = new SessionRepositoryBuilder(session);
    }
  }

  SessionInitializer(Session session) {
    this.session = Objects.requireNonNull(session, "empty session");
@@ -107,30 +110,44 @@ public final class SessionInitializer extends AbstractSessionOperations {
    return this;
  }

  public SessionInitializer showQueryValuesInLog(boolean showValues) {
    this.showValues = showValues;
    return this;
  }

  public SessionInitializer showQueryValuesInLog() {
    this.showValues = true;
    return this;
  }

  public boolean showValues() {
    return showValues;
  }

  public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
    this.metricRegistry = metricRegistry;
    return this;
  }

  public SessionInitializer zipkinTracer(Tracer tracer) {
    this.zipkinTracer = tracer;
    return this;
  }

  public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
    this.unitOfWorkClass = e;
    return this;
  }

  public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
    this.consistencyLevel = consistencyLevel;
    return this;
  }

  public SessionInitializer setCacheManager(CacheManager cacheManager) {
    this.cacheManager = cacheManager;
    return this;
  }

  public ConsistencyLevel getDefaultConsistencyLevel() {
    return consistencyLevel;
  }

  public SessionInitializer setOperationsIdempotentByDefault() {
    this.idempotent = true;
    return this;
  }

  public SessionInitializer idempotentQueryExecution(boolean idempotent) {
    this.idempotent = idempotent;
    return this;
@@ -183,8 +200,11 @@ public final class SessionInitializer extends AbstractSessionOperations {

  public SessionInitializer addPackage(String packageName) {
    try {
      PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
          .forEach(clazz -> {
      PackageUtil.getClasses(packageName)
          .stream()
          .filter(c -> c.isInterface() && !c.isAnnotation())
          .forEach(
              clazz -> {
                initList.add(Either.right(clazz));
              });
    } catch (IOException | ClassNotFoundException e) {
@@ -229,8 +249,10 @@ public final class SessionInitializer extends AbstractSessionOperations {
  }

  public SessionInitializer use(String keyspace) {
    if (session != null) {
      session.execute(SchemaUtil.use(keyspace, false));
      this.usingKeyspace = keyspace;
    }
    return this;
  }

@@ -246,16 +268,28 @@ public final class SessionInitializer extends AbstractSessionOperations {

  public synchronized HelenusSession get() {
    initialize();
    return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
        autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, idempotent, unitOfWorkClass, metricRegistry,
        zipkinTracer);
    return new HelenusSession(
        session,
        usingKeyspace,
        registry,
        showCql,
        showValues,
        printStream,
        sessionRepository,
        executor,
        autoDdl == AutoDdl.CREATE_DROP,
        consistencyLevel,
        idempotent,
        cacheManager,
        metricRegistry);
  }

  private void initialize() {

    Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");

    initList.forEach((either) -> {
    initList.forEach(
        (either) -> {
          Class<?> iface = null;
          if (either.isLeft()) {
            iface = MappingUtil.getMappingInterface(either.getLeft());
@@ -264,61 +298,91 @@ public final class SessionInitializer extends AbstractSessionOperations {
          }

          DslExportable dsl = (DslExportable) Helenus.dsl(iface);
          if (session != null) {
            dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
          }
          if (sessionRepository != null) {
            sessionRepository.add(dsl);
          }
        });

    if (session == null) return;

    TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
    UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);

    switch (autoDdl) {
      case CREATE_DROP :
      case CREATE_DROP:

        // Drop view first, otherwise a `DROP TABLE ...` will fail as the type is still
        // referenced
        // by a view.
        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.dropView(e));

        // Drop tables second, before DROP TYPE otherwise a `DROP TYPE ...` will fail as
        // the type is
        // still referenced by a table.
        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.dropTable(e));

        eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));

        // FALLTHRU to CREATE case (read: the absence of a `break;` statement here is
        // intentional!)
      case CREATE :
      case CREATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.createTable(e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.createView(e));

        break;

      case VALIDATE :
      case VALIDATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.validateTable(getTableMetadata(e), e));

        break;

      case UPDATE :
      case UPDATE:
        eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.dropView(e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.TABLE)
            .forEach(e -> tableOps.updateTable(getTableMetadata(e), e));

        sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.VIEW)
        sessionRepository
            .entities()
            .stream()
            .filter(e -> e.getType() == HelenusEntityType.VIEW)
            .forEach(e -> tableOps.createView(e));
        break;
    }
@@ -330,27 +394,41 @@ public final class SessionInitializer extends AbstractSessionOperations {
    }
  }

  private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
  private void eachUserTypeInOrder(
      UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {

    Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
    Set<HelenusEntity> stack = new HashSet<HelenusEntity>();

    sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
    sessionRepository
        .entities()
        .stream()
        .filter(e -> e.getType() == HelenusEntityType.UDT)
        .forEach(
            e -> {
              stack.clear();
              eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
            });
  }

  private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
  private void eachUserTypeInReverseOrder(
      UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
    ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
    eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
    deque.stream().forEach(e -> {
    deque
        .stream()
        .forEach(
            e -> {
              action.accept(e);
            });
  }

  private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
      UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
  private void eachUserTypeInRecursion(
      HelenusEntity e,
      Set<HelenusEntity> processedSet,
      Set<HelenusEntity> stack,
      UserTypeOperations userTypeOps,
      Consumer<? super HelenusEntity> action) {

    stack.add(e);

@@ -371,7 +449,8 @@ public final class SessionInitializer extends AbstractSessionOperations {

  private KeyspaceMetadata getKeyspaceMetadata() {
    if (keyspaceMetadata == null) {
      keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
      keyspaceMetadata =
          session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
    }
    return keyspaceMetadata;
  }
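Taken together, the initializer changes surface through the builder before get(): value logging is now toggleable, a javax.cache CacheManager can be supplied, and the idempotent default flipped from true to false, so callers must opt back in. A setup sketch using only methods visible in this diff, and assuming the usual Helenus.init(...) entry point over a driver Session:

    HelenusSession session =
        Helenus.init(cassandraSession)          // assumed entry point, not shown in this diff
            .use("my_keyspace")                 // keyspace is required before get()
            .showQueryValuesInLog(true)         // new: log bound values, not just CQL
            .setCacheManager(cacheManager)      // new: javax.cache-backed caching
            .idempotentQueryExecution(true)     // opt back in; default is now false
            .addPackage("com.example.model")    // hypothetical package of mapped interfaces
            .get();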
net/helenus/core/SessionRepository.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,11 +16,9 @@
 */
package net.helenus.core;

import java.util.Collection;

import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;

import java.util.Collection;
import net.helenus.mapping.HelenusEntity;

public final class SessionRepository {
@@ -32,7 +31,8 @@ public final class SessionRepository {

  userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();

  entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
  entityMap =
      ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
}

public UserType findUserType(String name) {
net/helenus/core/SessionRepositoryBuilder.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,17 +16,15 @@
 */
package net.helenus.core;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
@@ -35,7 +34,8 @@ import net.helenus.support.HelenusMappingException;

public final class SessionRepositoryBuilder {

  private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);
  private static final Optional<HelenusEntityType> OPTIONAL_UDT =
      Optional.of(HelenusEntityType.UDT);

  private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();

@@ -99,7 +99,8 @@ public final class SessionRepositoryBuilder {
  entity = helenusEntity;

  if (type.isPresent() && entity.getType() != type.get()) {
    throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
    throw new HelenusMappingException(
        "unexpected entity type " + entity.getType() + " for " + entity);
  }

  HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
net/helenus/core/TableOperations.java
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,11 +16,9 @@
 */
package net.helenus.core;

import java.util.List;

import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;

@@ -29,26 +28,30 @@ public final class TableOperations {
  private final boolean dropUnusedColumns;
  private final boolean dropUnusedIndexes;

  public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
  public TableOperations(
      AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
    this.sessionOps = sessionOps;
    this.dropUnusedColumns = dropUnusedColumns;
    this.dropUnusedIndexes = dropUnusedIndexes;
  }

  public void createTable(HelenusEntity entity) {
    sessionOps.execute(SchemaUtil.createTable(entity), true);
    sessionOps.execute(SchemaUtil.createTable(entity));
    executeBatch(SchemaUtil.createIndexes(entity));
  }

  public void dropTable(HelenusEntity entity) {
    sessionOps.execute(SchemaUtil.dropTable(entity), true);
    sessionOps.execute(SchemaUtil.dropTable(entity));
  }

  public void validateTable(TableMetadata tmd, HelenusEntity entity) {

    if (tmd == null) {
      throw new HelenusException(
          "table does not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
          "table does not exists "
              + entity.getName()
              + "for entity "
              + entity.getMappingInterface());
    }

    List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
@@ -57,7 +60,10 @@ public final class TableOperations {

    if (!list.isEmpty()) {
      throw new HelenusException(
          "schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
          "schema changed for entity "
              + entity.getMappingInterface()
              + ", apply this command: "
              + list);
    }
  }

@@ -73,14 +79,15 @@ public final class TableOperations {

  public void createView(HelenusEntity entity) {
    sessionOps.execute(
        SchemaUtil.createMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
    // executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
    // does not yet support 2i on materialized views.
        SchemaUtil.createMaterializedView(
            sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
    // executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10 does not yet support 2i on materialized views.
  }

  public void dropView(HelenusEntity entity) {
    sessionOps.execute(
        SchemaUtil.dropMaterializedView(sessionOps.usingKeyspace(), entity.getName().toCql(), entity), true);
        SchemaUtil.dropMaterializedView(
            sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
  }

  public void updateView(TableMetadata tmd, HelenusEntity entity) {
@@ -95,8 +102,6 @@ public final class TableOperations {

  private void executeBatch(List<SchemaStatement> list) {

    list.forEach(s -> {
      sessionOps.execute(s, true);
    });
    list.forEach(s -> sessionOps.execute(s));
  }
}
@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -15,54 +16,801 @@
 */
package net.helenus.core;

import java.util.List;
import java.util.Optional;
import static net.helenus.core.HelenusSession.deleted;

import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.configuration.CacheEntryListenerConfiguration;
import javax.cache.configuration.Configuration;
import javax.cache.integration.CacheLoader;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CompletionListener;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.EntryProcessorResult;

import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.MapCache;
import net.helenus.core.operation.AbstractOperation;
import net.helenus.core.operation.BatchOperation;
import net.helenus.mapping.MappingUtil;
import net.helenus.support.CheckedRunnable;
import net.helenus.support.Either;
import net.helenus.support.HelenusException;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public interface UnitOfWork<X extends Exception> extends AutoCloseable {
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public class UnitOfWork implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(UnitOfWork.class);

public final UnitOfWork parent;
protected final List<UnitOfWork> nested = new ArrayList<>();
protected final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
protected final EvictTrackingMapCache<String, Object> statementCache;
protected final HelenusSession session;
protected String purpose;
protected List<String> nestedPurposes = new ArrayList<String>();
protected String info;
protected int cacheHits = 0;
protected int cacheMisses = 0;
protected int databaseLookups = 0;
protected final Stopwatch elapsedTime;
protected Map<String, Double> databaseTime = new HashMap<>();
protected double cacheLookupTimeMSecs = 0.0;
private List<CheckedRunnable> commitThunks = new ArrayList<>();
private List<CheckedRunnable> abortThunks = new ArrayList<>();
private Consumer<? super Throwable> exceptionallyThunk;
private List<CompletableFuture<?>> asyncOperationFutures = new ArrayList<CompletableFuture<?>>();
private boolean aborted = false;
private boolean committed = false;
private long committedAt = 0L;
private BatchOperation batch;

public UnitOfWork(HelenusSession session) {
this(session, null);
}

public UnitOfWork(HelenusSession session, UnitOfWork parent) {
Objects.requireNonNull(session, "containing session cannot be null");

this.parent = parent;
if (parent != null) {
parent.addNestedUnitOfWork(this);
}
this.session = session;
CacheLoader<String, Object> cacheLoader = null;
if (parent != null) {
cacheLoader =
new CacheLoader<String, Object>() {

Cache<String, Object> cache = parent.getCache();

@Override
public Object load(String key) throws CacheLoaderException {
return cache.get(key);
}

@Override
public Map<String, Object> loadAll(Iterable<? extends String> keys)
throws CacheLoaderException {
Map<String, Object> kvp = new HashMap<String, Object>();
for (String key : keys) {
kvp.put(key, cache.get(key));
}
return kvp;
}
};
}
this.elapsedTime = Stopwatch.createUnstarted();
this.statementCache = new EvictTrackingMapCache<String, Object>(null, "UOW(" + hashCode() + ")", cacheLoader, true);
}

public void addDatabaseTime(String name, Stopwatch amount) {
Double time = databaseTime.get(name);
if (time == null) {
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
} else {
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
}
}

public void addCacheLookupTime(Stopwatch amount) {
cacheLookupTimeMSecs += amount.elapsed(TimeUnit.MICROSECONDS);
}

public void addNestedUnitOfWork(UnitOfWork uow) {
synchronized (nested) {
nested.add(uow);
}
}

/**
 * Marks the beginning of a transactional section of work. Will write a record
 * to the shared write-ahead log.
 * Marks the beginning of a transactional section of work. Will write a
 * recordCacheAndDatabaseOperationCount to the shared write-ahead log.
 *
 * @return the handle used to commit or abort the work.
 */
UnitOfWork<X> begin();
public synchronized UnitOfWork begin() {
elapsedTime.start();
// log.record(txn::start)
return this;
}

void addNestedUnitOfWork(UnitOfWork<X> uow);
public String getPurpose() {
return purpose;
}

public UnitOfWork setPurpose(String purpose) {
this.purpose = purpose;
return this;
}

public void addFuture(CompletableFuture<?> future) {
asyncOperationFutures.add(future);
}

public void setInfo(String info) {
this.info = info;
}

public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
if (cache > 0) {
cacheHits += cache;
} else {
cacheMisses += Math.abs(cache);
}
if (ops > 0) {
databaseLookups += ops;
}
}

public String logTimers(String what) {
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double d = 0.0;
double c = cacheLookupTimeMSecs / 1000.0;
double fc = (c / e) * 100.0;
String database = "";
if (databaseTime.size() > 0) {
List<String> dbt = new ArrayList<>(databaseTime.size());
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
double t = dt.getValue() / 1000.0;
d += t;
dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
}
double fd = (d / e) * 100.0;
database =
String.format(
", %d quer%s (%,.3fms %,2.2f%% - %s)",
databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
}
String cache = "";
if (cacheLookupTimeMSecs > 0) {
int cacheLookups = cacheHits + cacheMisses;
cache =
String.format(
" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
}
String da = "";
if (databaseTime.size() > 0 || cacheLookupTimeMSecs > 0) {
double dat = d + c;
double daf = (dat / e) * 100;
da =
String.format(
" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
}
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
String n =
nested
.stream()
.map(uow -> String.valueOf(uow.hashCode()))
.collect(Collectors.joining(", "));
String s =
String.format(
Locale.US,
"UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
hashCode(),
(nested.size() > 0 ? ", [" + n + "]" : ""),
what,
e,
cache,
database,
da,
(purpose == null ? "" : " " + purpose),
(nestedPurposes.isEmpty()) ? "" : ", " + x,
(info == null) ? "" : " " + info);
return s;
}

private void applyPostCommitFunctions(String what, List<CheckedRunnable> thunks, Consumer<? super Throwable> exceptionallyThunk) {
if (!thunks.isEmpty()) {
for (CheckedRunnable f : thunks) {
try {
f.run();
} catch (Throwable t) {
if (exceptionallyThunk != null) {
exceptionallyThunk.accept(t);
}
}
}
}
}

public Optional<Object> cacheLookup(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
Optional<Object> result = Optional.empty();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
if (eitherValue != null) {
Object value = deleted;
if (eitherValue.isLeft()) {
value = eitherValue.getLeft();
}
return Optional.of(value);
}
}
}

// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
result = checkParentCache(facets);
if (result.isPresent()) {
Object r = result.get();
Class<?> iface = MappingUtil.getMappingInterface(r);
if (Helenus.entity(iface).isDraftable()) {
cacheUpdate(r, facets);
} else {
cacheUpdate(SerializationUtils.<Serializable>clone((Serializable) r), facets);
}
}
return result;
}

private Optional<Object> checkParentCache(List<Facet> facets) {
Optional<Object> result = Optional.empty();
if (parent != null) {
result = parent.checkParentCache(facets);
}
return result;
}

public List<Facet> cacheEvict(List<Facet> facets) {
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
String tableName = CacheUtil.schemaName(facets);
Optional<Object> optionalValue = cacheLookup(facets);

for (Facet facet : facets) {
if (!facet.fixed()) {
String columnKey = facet.name() + "==" + facet.value();
// mark the value identified by the facet to `deleted`
cache.put(tableName, columnKey, deletedObjectFacets);
}
}

// Now, look for other row/col pairs that referenced the same object, mark them
// `deleted` if the cache had a value before we added the deleted marker objects.
if (optionalValue.isPresent()) {
Object value = optionalValue.get();
cache
.columnKeySet()
.forEach(
columnKey -> {
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
if (eitherCachedValue.isLeft()) {
Object cachedValue = eitherCachedValue.getLeft();
if (cachedValue == value) {
cache.put(tableName, columnKey, deletedObjectFacets);
String[] parts = columnKey.split("==");
facets.add(new Facet<String>(parts[0], parts[1]));
}
}
});
}
return facets;
}

public Cache<String, Object> getCache() {
return statementCache;
}

public Object cacheUpdate(Object value, List<Facet> facets) {
Object result = null;
String tableName = CacheUtil.schemaName(facets);
for (Facet facet : facets) {
if (!facet.fixed()) {
if (facet.alone()) {
String columnName = facet.name() + "==" + facet.value();
if (result == null) result = cache.get(tableName, columnName);
cache.put(tableName, columnName, Either.left(value));
}
}
}
return result;
}

public void batch(AbstractOperation s) {
if (batch == null) {
batch = new BatchOperation(session);
}
batch.add(s);
}

private Iterator<UnitOfWork> getChildNodes() {
return nested.iterator();
}

/**
 * Checks to see if the work performed between calling begin and now can be
 * committed or not.
 * Checks to see if the work performed between calling begin and now can be committed or not.
 *
 * @return a function from which to chain work that only happens when commit is
 * successful
 * @throws X
 * when the work overlaps with other concurrent writers.
 * @return a function from which to chain work that only happens when commit is successful
 * @throws HelenusException when the work overlaps with other concurrent writers.
 */
PostCommitFunction<Void, Void> commit() throws X;
public synchronized PostCommitFunction<Void, Void> commit() throws HelenusException {

if (isDone()) {
return PostCommitFunction.NULL_ABORT;
}

// Only the outer-most UOW batches statements for commit time, execute them.
if (batch != null) {
committedAt = batch.sync(this); //TODO(gburd): update cache with writeTime...
}

// All nested UnitOfWork should be committed (not aborted) before calls to
// commit, check.
boolean canCommit = true;
TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
for (UnitOfWork uow : traverser.postOrderTraversal(this)) {
if (this != uow) {
canCommit &= (!uow.aborted && uow.committed);
}
}

if (!canCommit) {

if (parent == null) {

// Apply all post-commit abort functions, this is the outer-most UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("aborted", abortThunks, exceptionallyThunk);
});

elapsedTime.stop();
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("aborted"));
}
}

return PostCommitFunction.NULL_ABORT;
} else {
committed = true;
aborted = false;

if (parent == null) {

// Apply all post-commit commit functions, this is the outer-most UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("committed", uow.commitThunks, exceptionallyThunk);
});

// Merge our statement cache into the session cache if it exists.
CacheManager cacheManager = session.getCacheManager();
if (cacheManager != null) {
for (Map.Entry<String, Object> entry :
(Set<Map.Entry<String, Object>>) statementCache.<Map>unwrap(Map.class).entrySet()) {
String[] keyParts = entry.getKey().split("\\.");
if (keyParts.length == 2) {
String cacheName = keyParts[0];
String key = keyParts[1];
if (!StringUtils.isBlank(cacheName) && !StringUtils.isBlank(key)) {
Cache<Object, Object> cache = cacheManager.getCache(cacheName);
if (cache != null) {
Object value = entry.getValue();
if (value == deleted) {
cache.remove(key);
} else {
cache.put(key.toString(), value);
}
}
}
}
}
}

// Merge our cache into the session cache.
session.mergeCache(cache);

// Spoil any lingering futures that may be out there.
asyncOperationFutures.forEach(
f ->
f.completeExceptionally(
new HelenusException(
"Futures must be resolved before their unit of work has committed/aborted.")));

elapsedTime.stop();
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("committed"));
}

return PostCommitFunction.NULL_COMMIT;
} else {
// Merge cache and statistics into parent if there is one.
parent.statementCache.putAll(statementCache.<Map>unwrap(Map.class));
parent.statementCache.removeAll(statementCache.getDeletions());
parent.mergeCache(cache);
parent.addBatched(batch);
if (purpose != null) {
parent.nestedPurposes.add(purpose);
}
parent.cacheHits += cacheHits;
parent.cacheMisses += cacheMisses;
parent.databaseLookups += databaseLookups;
parent.cacheLookupTimeMSecs += cacheLookupTimeMSecs;
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
String name = dt.getKey();
if (parent.databaseTime.containsKey(name)) {
double t = parent.databaseTime.get(name);
parent.databaseTime.put(name, t + dt.getValue());
} else {
parent.databaseTime.put(name, dt.getValue());
}
}
}
}
// TODO(gburd): hopefully we'll be able to detect conflicts here and so we'd want to...
// else {
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
// T object = ctor.newInstance(new Object[] { String message });
// }
return new PostCommitFunction<Void, Void>(commitThunks, abortThunks, exceptionallyThunk, true);
}

private void addBatched(BatchOperation batchArg) {
if (batchArg != null) {
if (this.batch == null) {
this.batch = batchArg;
} else {
this.batch.addAll(batchArg);
}
}
}

/**
 * Explicitly abort the work within this unit of work. Any nested aborted unit
 * of work will trigger the entire unit of work to commit.
 * Explicitly abort the work within this unit of work. Any nested aborted unit of work will
 * trigger the entire unit of work to commit.
 */
void abort();
public synchronized void abort() {
if (!aborted) {
aborted = true;

boolean hasAborted();
// Spoil any pending futures created within the context of this unit of work.
asyncOperationFutures.forEach(
f ->
f.completeExceptionally(
new HelenusException(
"Futures must be resolved before their unit of work has committed/aborted.")));

boolean hasCommitted();
TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("aborted", uow.abortThunks, exceptionallyThunk);
uow.abortThunks.clear();
});

Optional<Object> cacheLookup(List<Facet> facets);
if (parent == null) {
if (elapsedTime.isRunning()) {
elapsedTime.stop();
}
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("aborted"));
}
}

void cacheUpdate(Object pojo, List<Facet> facets);
// TODO(gburd): when we integrate the transaction support we'll need to...
// log.record(txn::abort)
// cache.invalidateSince(txn::start time)
}
}

UnitOfWork setPurpose(String purpose);
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
from.rowMap()
.forEach(
(rowKey, columnMap) -> {
columnMap.forEach(
(columnKey, value) -> {
if (to.contains(rowKey, columnKey)) {
to.put(
rowKey,
columnKey,
Either.left(
CacheUtil.merge(
to.get(rowKey, columnKey).getLeft(),
from.get(rowKey, columnKey).getLeft())));
} else {
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
}
});
});
}

Stopwatch getExecutionTimer();
public boolean isDone() {
return aborted || committed;
}

Stopwatch getCacheLookupTimer();
public String describeConflicts() {
return "it's complex...";
}

@Override
public void close() throws HelenusException {
// Closing a UnitOfWork will abort iff we've not already aborted or committed this unit of work.
if (aborted == false && committed == false) {
abort();
}
}

public boolean hasAborted() {
return aborted;
}

public boolean hasCommitted() {
return committed;
}

public long committedAt() {
return committedAt;
}

private static class EvictTrackingMapCache<K, V> implements Cache<K, V> {
private final Set<K> deletes;
private final Cache<K, V> delegate;

public EvictTrackingMapCache(CacheManager manager, String name, CacheLoader<K, V> cacheLoader,
boolean isReadThrough) {
deletes = Collections.synchronizedSet(new HashSet<>());
delegate = new MapCache<>(manager, name, cacheLoader, isReadThrough);
}

/** Non-interface method; should only be called by UnitOfWork when merging to an enclosing UnitOfWork. */
public Set<K> getDeletions() {
return new HashSet<>(deletes);
}

@Override
public V get(K key) {
if (deletes.contains(key)) {
return null;
}

return delegate.get(key);
}

@Override
public Map<K, V> getAll(Set<? extends K> keys) {
Set<? extends K> clonedKeys = new HashSet<>(keys);
clonedKeys.removeAll(deletes);
return delegate.getAll(clonedKeys);
}

@Override
public boolean containsKey(K key) {
if (deletes.contains(key)) {
return false;
}

return delegate.containsKey(key);
}

@Override
public void loadAll(Set<? extends K> keys, boolean replaceExistingValues, CompletionListener listener) {
Set<? extends K> clonedKeys = new HashSet<>(keys);
clonedKeys.removeAll(deletes);
delegate.loadAll(clonedKeys, replaceExistingValues, listener);
}

@Override
public void put(K key, V value) {
if (deletes.contains(key)) {
deletes.remove(key);
}

delegate.put(key, value);
}

@Override
public V getAndPut(K key, V value) {
if (deletes.contains(key)) {
deletes.remove(key);
}

return delegate.getAndPut(key, value);
}

@Override
public void putAll(Map<? extends K, ? extends V> map) {
deletes.removeAll(map.keySet());
delegate.putAll(map);
}

@Override
public synchronized boolean putIfAbsent(K key, V value) {
if (!delegate.containsKey(key) && deletes.contains(key)) {
deletes.remove(key);
}

return delegate.putIfAbsent(key, value);
}

@Override
public boolean remove(K key) {
boolean removed = delegate.remove(key);
deletes.add(key);
return removed;
}

@Override
public boolean remove(K key, V value) {
boolean removed = delegate.remove(key, value);
if (removed) {
deletes.add(key);
}

return removed;
}

@Override
public V getAndRemove(K key) {
V value = delegate.getAndRemove(key);
deletes.add(key);
return value;
}

@Override
public void removeAll(Set<? extends K> keys) {
Set<? extends K> cloneKeys = new HashSet<>(keys);
delegate.removeAll(cloneKeys);
deletes.addAll(cloneKeys);
}

@Override
@SuppressWarnings("unchecked")
public synchronized void removeAll() {
Map<K, V> impl = delegate.unwrap(Map.class);
Set<K> keys = impl.keySet();
delegate.removeAll();
deletes.addAll(keys);
}

@Override
public void clear() {
delegate.clear();
// TODO(gburd): all parents too
deletes.clear();
}

@Override
public boolean replace(K key, V oldValue, V newValue) {
if (deletes.contains(key)) {
return false;
}

return delegate.replace(key, oldValue, newValue);
}

@Override
public boolean replace(K key, V value) {
if (deletes.contains(key)) {
return false;
}

return delegate.replace(key, value);
}

@Override
public V getAndReplace(K key, V value) {
if (deletes.contains(key)) {
return null;
}

return delegate.getAndReplace(key, value);
}

@Override
public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
return delegate.getConfiguration(clazz);
}

@Override
public <T> T invoke(K key, EntryProcessor<K, V, T> processor, Object... arguments)
throws EntryProcessorException {
if (deletes.contains(key)) {
return null;
}

return delegate.invoke(key, processor, arguments);
}

@Override
public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> processor,
Object... arguments) {
Set<? extends K> clonedKeys = new HashSet<>(keys);
clonedKeys.removeAll(deletes);
return delegate.invokeAll(clonedKeys, processor, arguments);
}

@Override
public String getName() {
return delegate.getName();
}

@Override
public CacheManager getCacheManager() {
return delegate.getCacheManager();
}

@Override
public void close() {
delegate.close();
}

@Override
public boolean isClosed() {
return delegate.isClosed();
}

@Override
public <T> T unwrap(Class<T> clazz) {
return delegate.unwrap(clazz);
}

@Override
public void registerCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
delegate.registerCacheEntryListener(cacheEntryListenerConfiguration);
}

@Override
public void deregisterCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
delegate.deregisterCacheEntryListener(cacheEntryListenerConfiguration);
}

@Override
public Iterator<Entry<K, V>> iterator() {
return delegate.iterator();
}
}
}
@ -1,26 +0,0 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core;

import net.helenus.support.HelenusException;

class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> {

@SuppressWarnings("unchecked")
public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) {
super(session, (AbstractUnitOfWork<HelenusException>) parent);
}
}
@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -15,11 +16,9 @@
 */
package net.helenus.core;

import java.util.List;

import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

import java.util.List;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;

@ -35,12 +34,12 @@ public final class UserTypeOperations {

public void createUserType(HelenusEntity entity) {

sessionOps.execute(SchemaUtil.createUserType(entity), true);
sessionOps.execute(SchemaUtil.createUserType(entity));
}

public void dropUserType(HelenusEntity entity) {

sessionOps.execute(SchemaUtil.dropUserType(entity), true);
sessionOps.execute(SchemaUtil.dropUserType(entity));
}

public void validateUserType(UserType userType, HelenusEntity entity) {

@ -54,7 +53,10 @@ public final class UserTypeOperations {

if (!list.isEmpty()) {
throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
"schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
}
}

@ -70,8 +72,6 @@ public final class UserTypeOperations {

private void executeBatch(List<SchemaStatement> list) {

list.forEach(s -> {
sessionOps.execute(s, true);
});
list.forEach(s -> sessionOps.execute(s));
}
}
@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -22,5 +23,4 @@ import java.lang.annotation.Target;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Cacheable {
}
public @interface Cacheable {}
@ -1,17 +1,36 @@
/*
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import java.util.concurrent.TimeoutException;
import net.helenus.core.ConflictingUnitOfWorkException;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Retry {

Class<? extends Exception>[] on() default ConflictingUnitOfWorkException.class;
Class<? extends Exception>[] on() default {
ConflictingUnitOfWorkException.class, TimeoutException.class
};

int times() default 3;
}
@ -1,8 +1,25 @@
/*
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.helenus.core.aspect;

import java.lang.reflect.Method;
import java.util.Arrays;

import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;

@ -13,8 +30,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;

import net.helenus.core.annotation.Retry;

@Aspect
public class RetryAspect {

@ -39,8 +54,8 @@ public class RetryAspect {
return tryProceeding(pjp, times, retryOn);
}

private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
throws Throwable {
private Object tryProceeding(
ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try {
return proceed(pjp);
} catch (Throwable throwable) {

@ -73,7 +88,7 @@ public class RetryAspect {
return retryAnnotation;
}

Class[] argClasses = new Class[pjp.getArgs().length];
Class<?>[] argClasses = new Class[pjp.getArgs().length];
for (int i = 0; i < pjp.getArgs().length; i++) {
argClasses[i] = pjp.getArgs()[i].getClass();
}
@ -2,7 +2,7 @@ package net.helenus.core.aspect;

import java.lang.reflect.Method;
import java.util.Arrays;

import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;

@ -13,8 +13,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;

import net.helenus.core.annotation.Retry;

@Aspect
public class RetryConcurrentUnitOfWorkAspect {

@ -39,8 +37,8 @@ public class RetryConcurrentUnitOfWorkAspect {
return tryProceeding(pjp, times, retryOn);
}

private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
throws Throwable {
private Object tryProceeding(
ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try {
return proceed(pjp);
} catch (Throwable throwable) {
@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -15,24 +16,45 @@
 */
package net.helenus.core.cache;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import net.helenus.mapping.HelenusProperty;

public class BoundFacet extends Facet<String> {
private final Map<HelenusProperty, Object> properties;

BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(name,
public BoundFacet(HelenusProperty property, Object value) {
super(property.getPropertyName(), value == null ? null : value.toString());
this.properties = new HashMap<HelenusProperty, Object>(1);
this.properties.put(property, value);
}

public Set<HelenusProperty> getProperties() {
return properties.keySet();
}

public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super(
name,
(properties.keySet().size() > 1)
? "[" + String.join(", ",
properties.keySet().stream().map(key -> properties.get(key).toString())
? "["
+ String.join(
", ",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet()))
+ "]"
: String.join("", properties.keySet().stream().map(key -> properties.get(key).toString())
: String.join(
"",
properties
.keySet()
.stream()
.map(key -> properties.get(key).toString())
.collect(Collectors.toSet())));
this.properties = properties;
}
}
190 src/main/java/net/helenus/core/cache/CacheUtil.java vendored
@ -1,15 +1,23 @@
package net.helenus.core.cache;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider;

public class CacheUtil {

public static List<String[]> combinations(List<String> items) {
int n = items.size();
if (n > 20 || n < 0)
throw new IllegalArgumentException(n + " is out of range");
if (n > 20) throw new IllegalArgumentException(n + " is out of range");
long e = Math.round(Math.pow(2, n));
List<String[]> out = new ArrayList<String[]>((int) e - 1);
for (int k = 1; k <= items.size(); k++) {

@ -18,7 +26,8 @@ public class CacheUtil {
return out;
}

private static void kCombinations(List<String> items, int n, int k, String[] arr, List<String[]> out) {
private static void kCombinations(
List<String> items, int n, int k, String[] arr, List<String[]> out) {
if (k == 0) {
out.add(arr.clone());
} else {

@ -29,21 +38,184 @@ public class CacheUtil {
}
}

public static List<String> flatKeys(String table, List<Facet> facets) {
return flattenFacets(facets)
.stream()
.map(
combination -> {
return table + "." + Arrays.toString(combination);
})
.collect(Collectors.toList());
}

public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = CacheUtil.combinations(
facets.stream().filter(facet -> !facet.fixed()).filter(facet -> facet.value() != null).map(facet -> {
List<String[]> combinations =
CacheUtil.combinations(
facets
.stream()
.filter(facet -> !facet.fixed())
.filter(facet -> facet.value() != null)
.map(
facet -> {
return facet.name() + "==" + facet.value();
}).collect(Collectors.toList()));
})
.collect(Collectors.toList()));
// TODO(gburd): rework so as to not generate the combinations at all rather than filter
facets =
facets
.stream()
.filter(f -> !f.fixed())
.filter(f -> !f.alone() || !f.combined())
.collect(Collectors.toList());
for (Facet facet : facets) {
combinations =
combinations
.stream()
.filter(
combo -> {
// When used alone, this facet is not distinct so don't use it as a key.
if (combo.length == 1) {
if (!facet.alone() && combo[0].startsWith(facet.name() + "==")) {
return false;
}
} else {
if (!facet.combined()) {
for (String c : combo) {
// Don't use this facet in combination with others to create keys.
if (c.startsWith(facet.name() + "==")) {
return false;
}
}
}
}
return true;
})
.collect(Collectors.toList());
}
return combinations;
}

public static Object merge(Object to, Object from) {
return to; // TODO(gburd): yeah...
/** Merge changed values in the map behind `from` into `to`. */
public static Object merge(Object t, Object f) {
HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(t));

if (t == f) return t;
if (f == null) return t;
if (t == null) return f;

if (t instanceof MapExportable
&& t instanceof Entity
&& f instanceof MapExportable
&& f instanceof Entity) {
Entity to = (Entity) t;
Entity from = (Entity) f;
Map<String, Object> toValueMap = ((MapExportable) to).toMap();
Map<String, Object> fromValueMap = ((MapExportable) from).toMap();
for (HelenusProperty prop : entity.getOrderedProperties()) {
switch (prop.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
continue;
default:
Object toVal = BeanColumnValueProvider.INSTANCE.getColumnValue(to, -1, prop, false);
Object fromVal = BeanColumnValueProvider.INSTANCE.getColumnValue(from, -1, prop, false);
String ttlKey = ttlKey(prop);
String writeTimeKey = writeTimeKey(prop);
int[] toTtlI = (int[]) toValueMap.get(ttlKey);
int toTtl = (toTtlI != null) ? toTtlI[0] : 0;
Long toWriteTime = (Long) toValueMap.get(writeTimeKey);
int[] fromTtlI = (int[]) fromValueMap.get(ttlKey);
int fromTtl = (fromTtlI != null) ? fromTtlI[0] : 0;
Long fromWriteTime = (Long) fromValueMap.get(writeTimeKey);

if (toVal != null) {
if (fromVal != null) {
if (toVal == fromVal) {
// Case: object identity
// Goal: ensure write time and ttl are also in sync
if (fromWriteTime != null
&& fromWriteTime != 0L
&& (toWriteTime == null || fromWriteTime > toWriteTime)) {
((MapExportable) to).put(writeTimeKey, fromWriteTime);
}
if (fromTtl > 0 && fromTtl > toTtl) {
((MapExportable) to).put(ttlKey, fromTtl);
}
} else if (fromWriteTime != null && fromWriteTime != 0L) {
// Case: to exists and from exists
// Goal: copy over from -> to iff from.writeTime > to.writeTime
if (toWriteTime != null && toWriteTime != 0L) {
if (fromWriteTime > toWriteTime) {
((MapExportable) to).put(prop.getPropertyName(), fromVal);
((MapExportable) to).put(writeTimeKey, fromWriteTime);
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
} else {
((MapExportable) to).put(prop.getPropertyName(), fromVal);
((MapExportable) to).put(writeTimeKey, fromWriteTime);
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
} else {
if (toWriteTime == null || toWriteTime == 0L) {
// Caution, entering grey area...
if (!toVal.equals(fromVal)) {
// dangerous waters here, values diverge without information that enables resolution,
// policy (for now) is to move value from -> to anyway.
((MapExportable) to).put(prop.getPropertyName(), fromVal);
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
}
}
}
} else {
// Case: from exists, but to doesn't (it's null)
// Goal: copy over from -> to, include ttl and writeTime if present
if (fromVal != null) {
((MapExportable) to).put(prop.getPropertyName(), fromVal);
if (fromWriteTime != null && fromWriteTime != 0L) {
((MapExportable) to).put(writeTimeKey, fromWriteTime);
}
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
}
}
}
return to;
}
return t;
}

public static String schemaName(List<Facet> facets) {
return facets.stream().filter(Facet::fixed).map(facet -> facet.value().toString())
return facets
.stream()
.filter(Facet::fixed)
.map(facet -> facet.value().toString())
.collect(Collectors.joining("."));
}

public static String writeTimeKey(HelenusProperty prop) {
return writeTimeKey(prop.getColumnName().toCql(false));
}

public static String ttlKey(HelenusProperty prop) {
return ttlKey(prop.getColumnName().toCql(false));
}

public static String writeTimeKey(String columnName) {
String key = "_" + columnName + "_writeTime";
return key.toLowerCase();
}

public static String ttlKey(String columnName) {
String key = "_" + columnName + "_ttl";
return key.toLowerCase();
}
}
24 src/main/java/net/helenus/core/cache/Facet.java vendored
@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -16,13 +17,13 @@

package net.helenus.core.cache;

/**
 * An Entity is identifiable via one or more Facets
 */
/** An Entity is identifiable via one or more Facets */
public class Facet<T> {
private final String name;
private T value;
private boolean fixed = false;
private boolean alone = true;
private boolean combined = true;

public Facet(String name) {
this.name = name;

@ -50,4 +51,19 @@ public class Facet<T> {
return fixed;
}

public void setUniquelyIdentifyingWhenAlone(boolean alone) {
this.alone = alone;
}

public void setUniquelyIdentifyingWhenCombined(boolean combined) {
this.combined = combined;
}

public boolean alone() {
return alone;
}

public boolean combined() {
return combined;
}
}
463 src/main/java/net/helenus/core/cache/MapCache.java vendored Normal file
@ -0,0 +1,463 @@
|
|||
package net.helenus.core.cache;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import javax.cache.Cache;
|
||||
import javax.cache.CacheManager;
|
||||
import javax.cache.configuration.CacheEntryListenerConfiguration;
|
||||
import javax.cache.configuration.Configuration;
|
||||
import javax.cache.event.CacheEntryRemovedListener;
|
||||
import javax.cache.integration.CacheLoader;
|
||||
import javax.cache.integration.CompletionListener;
|
||||
import javax.cache.processor.EntryProcessor;
|
||||
import javax.cache.processor.EntryProcessorException;
|
||||
import javax.cache.processor.EntryProcessorResult;
|
||||
import javax.cache.processor.MutableEntry;
|
||||
|
||||
public class MapCache<K, V> implements Cache<K, V> {
|
||||
private final CacheManager manager;
|
||||
private final String name;
|
||||
private Map<K, V> map = new ConcurrentHashMap<>();
|
||||
private Set<CacheEntryRemovedListener<K, V>> cacheEntryRemovedListeners = new HashSet<>();
|
||||
private CacheLoader<K, V> cacheLoader = null;
|
||||
private boolean isReadThrough = false;
|
||||
|
||||
private static class MapConfiguration<K, V> implements Configuration<K, V> {
|
||||
private static final long serialVersionUID = 6093947542772516209L;
|
||||
|
||||
@Override
|
||||
public Class<K> getKeyType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<V> getValueType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isStoreByValue() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public MapCache(
|
||||
CacheManager manager, String name, CacheLoader<K, V> cacheLoader, boolean isReadThrough) {
|
||||
this.manager = manager;
|
||||
this.name = name;
|
||||
this.cacheLoader = cacheLoader;
|
||||
this.isReadThrough = isReadThrough;
|
||||
}
|
||||
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V get(K key) {
|
||||
V value = null;
|
||||
synchronized (map) {
|
||||
value = map.get(key);
|
||||
if (value == null && isReadThrough && cacheLoader != null) {
|
||||
V loadedValue = cacheLoader.load(key);
|
||||
if (loadedValue != null) {
|
||||
map.put(key, loadedValue);
|
||||
value = loadedValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public Map<K, V> getAll(Set<? extends K> keys) {
|
||||
Map<K, V> result = null;
|
||||
synchronized (map) {
|
||||
result = new HashMap<K, V>(keys.size());
|
||||
Iterator<? extends K> it = keys.iterator();
|
||||
while (it.hasNext()) {
|
||||
K key = it.next();
|
||||
V value = map.get(key);
|
||||
if (value != null) {
|
||||
result.put(key, value);
|
||||
it.remove();
|
||||
}
|
||||
}
|
||||
if (keys.size() != 0 && isReadThrough && cacheLoader != null) {
|
||||
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
|
||||
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
|
||||
V v = entry.getValue();
|
||||
if (v != null) {
|
||||
K k = entry.getKey();
|
||||
map.put(k, v);
|
||||
result.put(k, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean containsKey(K key) {
|
||||
return map.containsKey(key);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void loadAll(
|
||||
Set<? extends K> keys, boolean replaceExistingValues, CompletionListener completionListener) {
|
||||
if (cacheLoader != null) {
|
||||
try {
|
||||
synchronized (map) {
|
||||
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
|
||||
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
|
||||
V value = entry.getValue();
|
||||
K key = entry.getKey();
|
||||
if (value != null) {
|
||||
boolean existsCurrently = map.containsKey(key);
|
||||
if (!existsCurrently || replaceExistingValues) {
|
||||
map.put(key, value);
|
||||
keys.remove(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if (completionListener != null) {
|
||||
completionListener.onException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (completionListener != null) {
|
||||
if (keys.isEmpty()) {
|
||||
completionListener.onCompletion();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void put(K key, V value) {
|
||||
map.put(key, value);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public V getAndPut(K key, V value) {
|
||||
V result = null;
|
||||
synchronized (map) {
|
||||
result = map.get(key);
|
||||
if (result == null && isReadThrough && cacheLoader != null) {
|
||||
V loadedValue = cacheLoader.load(key);
|
||||
if (loadedValue != null) {
|
||||
result = loadedValue;
|
||||
}
|
||||
}
|
||||
map.put(key, value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public void putAll(Map<? extends K, ? extends V> map) {
|
||||
synchronized (map) {
|
||||
for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
|
||||
this.map.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public boolean putIfAbsent(K key, V value) {
|
||||
synchronized (map) {
|
||||
if (!map.containsKey(key)) {
|
||||
map.put(key, value);
|
||||
        return true;
      } else {
        return false;
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public boolean remove(K key) {
    boolean removed = false;
    synchronized (map) {
      removed = map.remove(key) != null;
      notifyRemovedListeners(key);
    }
    return removed;
  }

  /** {@inheritDoc} */
  @Override
  public boolean remove(K key, V oldValue) {
    synchronized (map) {
      V value = map.get(key);
      if (value != null && oldValue.equals(value)) {
        map.remove(key);
        notifyRemovedListeners(key);
        return true;
      }
    }
    return false;
  }

  /** {@inheritDoc} */
  @Override
  public V getAndRemove(K key) {
    synchronized (map) {
      V oldValue = map.get(key);
      map.remove(key);
      notifyRemovedListeners(key);
      return oldValue;
    }
  }

  /** {@inheritDoc} */
  @Override
  public boolean replace(K key, V oldValue, V newValue) {
    synchronized (map) {
      V value = map.get(key);
      if (value != null && oldValue.equals(value)) {
        map.put(key, newValue);
        return true;
      }
    }
    return false;
  }

  /** {@inheritDoc} */
  @Override
  public boolean replace(K key, V value) {
    synchronized (map) {
      if (map.containsKey(key)) {
        map.put(key, value);
        return true;
      }
    }
    return false;
  }

  /** {@inheritDoc} */
  @Override
  public V getAndReplace(K key, V value) {
    synchronized (map) {
      // Replace only when a mapping exists, returning the previous value (JSR-107 semantics).
      V oldValue = map.get(key);
      if (oldValue != null) {
        map.put(key, value);
        return oldValue;
      }
    }
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public void removeAll(Set<? extends K> keys) {
    synchronized (map) {
      Iterator<? extends K> it = keys.iterator();
      while (it.hasNext()) {
        K key = it.next();
        if (map.containsKey(key)) {
          map.remove(key);
        } else {
          // Drop absent keys from the set so listeners are notified only of real removals.
          it.remove();
        }
      }
    }
    notifyRemovedListeners(keys);
  }

  /** {@inheritDoc} */
  @Override
  public void removeAll() {
    synchronized (map) {
      Set<K> keys = map.keySet();
      map.clear();
      notifyRemovedListeners(keys);
    }
  }

  /** {@inheritDoc} */
  @Override
  public void clear() {
    map.clear();
  }

  /** {@inheritDoc} */
  @Override
  public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
    if (!MapConfiguration.class.isAssignableFrom(clazz)) {
      throw new IllegalArgumentException();
    }
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public <T> T invoke(K key, EntryProcessor<K, V, T> entryProcessor, Object... arguments)
      throws EntryProcessorException {
    // TODO
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public <T> Map<K, EntryProcessorResult<T>> invokeAll(
      Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... arguments) {
    synchronized (map) {
      for (K key : keys) {
        V value = map.get(key);
        if (value != null) {
          entryProcessor.process(
              new MutableEntry<K, V>() {
                @Override
                public boolean exists() {
                  return map.containsKey(key);
                }

                @Override
                public void remove() {
                  synchronized (map) {
                    V value = map.get(key);
                    if (value != null) {
                      map.remove(key);
                      notifyRemovedListeners(key);
                    }
                  }
                }

                @Override
                public K getKey() {
                  return key;
                }

                @Override
                public V getValue() {
                  // Look the entry up by its key, not by the captured value.
                  return map.get(key);
                }

                @Override
                public <T> T unwrap(Class<T> clazz) {
                  return null;
                }

                @Override
                public void setValue(V value) {
                  map.put(key, value);
                }
              },
              arguments);
        }
      }
    }
    // Note: the individual EntryProcessorResults are not collected yet.
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public String getName() {
    return name;
  }

  /** {@inheritDoc} */
  @Override
  public CacheManager getCacheManager() {
    return manager;
  }

  /** {@inheritDoc} */
  @Override
  public void close() {}

  /** {@inheritDoc} */
  @Override
  public boolean isClosed() {
    return false;
  }

  /** {@inheritDoc} */
  @Override
  @SuppressWarnings("unchecked")
  public <T> T unwrap(Class<T> clazz) {
    if (Map.class.isAssignableFrom(clazz)) {
      return (T) map;
    }
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public void registerCacheEntryListener(
      CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
    //cacheEntryRemovedListeners.add(cacheEntryListenerConfiguration.getCacheEntryListenerFactory().create());
  }

  /** {@inheritDoc} */
  @Override
  public void deregisterCacheEntryListener(
      CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {}

  /** {@inheritDoc} */
  @Override
  public Iterator<Entry<K, V>> iterator() {
    synchronized (map) {
      return new Iterator<Entry<K, V>>() {

        Iterator<Map.Entry<K, V>> entries = map.entrySet().iterator();

        @Override
        public boolean hasNext() {
          return entries.hasNext();
        }

        @Override
        public Entry<K, V> next() {
          Map.Entry<K, V> entry = entries.next();
          return new Entry<K, V>() {
            K key = entry.getKey();
            V value = entry.getValue();

            @Override
            public K getKey() {
              return key;
            }

            @Override
            public V getValue() {
              return value;
            }

            @Override
            public <T> T unwrap(Class<T> clazz) {
              return null;
            }
          };
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  }

  private void notifyRemovedListeners(K key) {
    // if (cacheEntryRemovedListeners != null) {
    //   cacheEntryRemovedListeners.forEach(listener -> listener.onRemoved())
    // }
  }

  private void notifyRemovedListeners(Set<? extends K> keys) {}
}
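The conditional-mutation methods above follow the JSR-107 (javax.cache) contract. A minimal sketch of how they behave, assuming `cache` is an instance of the Map-backed Cache implementation in this file (the demo method and key/value strings are illustrative, not part of the diff):

import javax.cache.Cache;

// Sketch only: exercises the compare-and-set family implemented above.
static void demo(Cache<String, String> cache) {
  cache.put("k", "v1");
  boolean swapped = cache.replace("k", "v1", "v2"); // true: current value matched "v1"
  String prior = cache.getAndReplace("k", "v3");    // returns "v2" and stores "v3"
  boolean removed = cache.remove("k", "nope");      // false: value mismatch, entry kept
  cache.removeAll();                                // clears and notifies removal listeners
}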
src/main/java/net/helenus/core/cache/UnboundFacet.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -19,23 +20,36 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import net.helenus.core.SchemaUtil;
 import net.helenus.mapping.HelenusProperty;

 public class UnboundFacet extends Facet<String> {

   private final List<HelenusProperty> properties;
+  private final boolean alone;
+  private final boolean combined;

-  public UnboundFacet(List<HelenusProperty> properties) {
+  public UnboundFacet(List<HelenusProperty> properties, boolean alone, boolean combined) {
     super(SchemaUtil.createPrimaryKeyPhrase(properties));
     this.properties = properties;
+    this.alone = alone;
+    this.combined = combined;
   }

-  public UnboundFacet(HelenusProperty property) {
+  public UnboundFacet(List<HelenusProperty> properties) {
+    this(properties, true, true);
+  }
+
+  public UnboundFacet(HelenusProperty property, boolean alone, boolean combined) {
     super(property.getPropertyName());
     properties = new ArrayList<HelenusProperty>();
     properties.add(property);
+    this.alone = alone;
+    this.combined = combined;
   }

+  public UnboundFacet(HelenusProperty property) {
+    this(property, true, true);
+  }
+
   public List<HelenusProperty> getProperties() {
@@ -43,18 +57,22 @@ public class UnboundFacet extends Facet<String> {
   }

   public Binder binder() {
-    return new Binder(name(), properties);
+    return new Binder(name(), properties, alone, combined);
   }

   public static class Binder {

     private final String name;
+    private final boolean alone;
+    private final boolean combined;
     private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
     private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();

-    Binder(String name, List<HelenusProperty> properties) {
+    Binder(String name, List<HelenusProperty> properties, boolean alone, boolean combined) {
       this.name = name;
       this.properties.addAll(properties);
+      this.alone = alone;
+      this.combined = combined;
     }

     public Binder setValueForProperty(HelenusProperty prop, Object value) {
@@ -68,7 +86,10 @@ public class UnboundFacet extends Facet<String> {
     }

     public BoundFacet bind() {
-      return new BoundFacet(name, boundProperties);
+      BoundFacet facet = new BoundFacet(name, boundProperties);
+      facet.setUniquelyIdentifyingWhenAlone(alone);
+      facet.setUniquelyIdentifyingWhenCombined(combined);
+      return facet;
     }
   }
 }
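A sketch of how the new identity flags travel from an UnboundFacet through its Binder to the BoundFacet it produces; the two HelenusProperty handles are hypothetical stand-ins, not names from this diff:

import java.util.Arrays;

// Sketch only: binds concrete values onto an UnboundFacet's properties.
static BoundFacet bindExample(HelenusProperty nameProp, HelenusProperty ageProp) {
  UnboundFacet facet =
      new UnboundFacet(Arrays.asList(nameProp, ageProp), /* alone */ true, /* combined */ true);
  UnboundFacet.Binder binder = facet.binder();
  binder.setValueForProperty(nameProp, "alice");
  binder.setValueForProperty(ageProp, "42");
  // bind() copies the alone/combined flags onto the resulting BoundFacet.
  return binder.isBound() ? binder.bind() : null;
}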
src/main/java/net/helenus/core/operation/AbstractFilterOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,14 +16,15 @@
  */
 package net.helenus.core.operation;

-import java.util.LinkedList;
-import java.util.List;
-
+import java.util.*;
 import net.helenus.core.*;
+import net.helenus.core.cache.Facet;
+import net.helenus.core.cache.UnboundFacet;
+import net.helenus.core.reflect.HelenusPropertyNode;
+import net.helenus.mapping.HelenusProperty;

 public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
-    extends
-    AbstractOperation<E, O> {
+    extends AbstractOperation<E, O> {

   protected List<Filter<?>> filters = null;
   protected List<Filter<?>> ifFilters = null;
@@ -107,4 +109,60 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
     }
     ifFilters.add(filter);
   }
+
+  @Override
+  protected boolean isIdempotentOperation() {
+    if (filters == null) {
+      return super.isIdempotentOperation();
+    }
+
+    return filters
+            .stream()
+            .anyMatch(
+                filter -> {
+                  HelenusPropertyNode node = filter.getNode();
+                  if (node != null) {
+                    HelenusProperty prop = node.getProperty();
+                    if (prop != null) {
+                      return prop.isIdempotent();
+                    }
+                  }
+                  return false;
+                })
+        || super.isIdempotentOperation();
+  }
+
+  protected List<Facet> bindFacetValues(List<Facet> facets) {
+    if (facets == null) {
+      return new ArrayList<Facet>();
+    }
+    List<Facet> boundFacets = new ArrayList<>();
+    Map<HelenusProperty, Filter> filterMap = new HashMap<>(filters.size());
+    filters.forEach(f -> filterMap.put(f.getNode().getProperty(), f));
+
+    for (Facet facet : facets) {
+      if (facet instanceof UnboundFacet) {
+        UnboundFacet unboundFacet = (UnboundFacet) facet;
+        UnboundFacet.Binder binder = unboundFacet.binder();
+        if (filters != null) {
+          for (HelenusProperty prop : unboundFacet.getProperties()) {
+            Filter filter = filterMap.get(prop);
+            if (filter != null) {
+              Object[] postulates = filter.postulateValues();
+              for (Object p : postulates) {
+                binder.setValueForProperty(prop, p.toString());
+              }
+            }
+          }
+        }
+        if (binder.isBound()) {
+          boundFacets.add(binder.bind());
+        }
+      } else {
+        boundFacets.add(facet);
+      }
+    }
+    return boundFacets;
+  }
 }
src/main/java/net/helenus/core/operation/AbstractFilterOptionalOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -19,13 +20,12 @@ import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-
 import net.helenus.core.*;
 import net.helenus.mapping.HelenusProperty;

-public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
-    extends
-    AbstractOptionalOperation<E, O> {
+public abstract class AbstractFilterOptionalOperation<
+        E, O extends AbstractFilterOptionalOperation<E, O>>
+    extends AbstractOptionalOperation<E, O> {

   protected Map<HelenusProperty, Filter<?>> filters = null;
   protected List<Filter<?>> ifFilters = null;
src/main/java/net/helenus/core/operation/AbstractFilterStreamOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -19,13 +20,12 @@ import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-
 import net.helenus.core.*;
 import net.helenus.mapping.HelenusProperty;

-public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
-    extends
-    AbstractStreamOperation<E, O> {
+public abstract class AbstractFilterStreamOperation<
+        E, O extends AbstractFilterStreamOperation<E, O>>
+    extends AbstractStreamOperation<E, O> {

   protected Map<HelenusProperty, Filter<?>> filters = null;
   protected List<Filter<?>> ifFilters = null;
@@ -43,7 +43,7 @@ public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterS
   public <V> O where(Getter<V> getter, Operator operator, V val) {

-    addFilter(Filter.create(getter, operator, val));
+    if (val != null) addFilter(Filter.create(getter, operator, val));

     return (O) this;
   }
@@ -64,7 +64,7 @@ public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterS
   public <V> O and(Getter<V> getter, Operator operator, V val) {

-    addFilter(Filter.create(getter, operator, val));
+    if (val != null) addFilter(Filter.create(getter, operator, val));

     return (O) this;
   }
@@ -85,7 +85,7 @@ public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterS
   public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {

-    addIfFilter(Filter.create(getter, operator, val));
+    if (val != null) addIfFilter(Filter.create(getter, operator, val));

     return (O) this;
   }
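With the `val != null` guards above, a null comparison value now simply drops the predicate instead of producing a broken WHERE clause, so optional criteria can be passed straight through. A hedged sketch (the getter parameter is hypothetical, and Operator.EQ is assumed from the Operator enum that Filter.create takes):

// Sketch only: a null filter value is silently skipped by where().
static <E> Stream<E> findByName(
    AbstractFilterStreamOperation<E, ?> op, Getter<String> nameGetter, String maybeName)
    throws TimeoutException {
  return op.where(nameGetter, Operator.EQ, maybeName) // no-op when maybeName is null
      .sync();
}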
src/main/java/net/helenus/core/operation/AbstractOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,15 +16,16 @@
  */
 package net.helenus.core.operation;

-import java.util.concurrent.CompletableFuture;
-
 import com.codahale.metrics.Timer;
 import com.datastax.driver.core.ResultSet;
-
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.TimeoutException;
 import net.helenus.core.AbstractSessionOperations;
 import net.helenus.core.UnitOfWork;

-public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {
+public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
+    extends AbstractStatementOperation<E, O> {

   public AbstractOperation(AbstractSessionOperations sessionOperations) {
     super(sessionOperations);
@@ -31,33 +33,29 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
   public abstract E transform(ResultSet resultSet);

   public boolean cacheable() {
     return false;
   }

   public PreparedOperation<E> prepare() {
     return new PreparedOperation<E>(prepareStatement(), this);
   }

-  public E sync() {// throws TimeoutException {
+  public E sync() throws TimeoutException {
     final Timer.Context context = requestLatency.time();
     try {
-      ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout, queryTimeoutUnits,
-          showValues, false);
+      ResultSet resultSet =
+          this.execute(
+              sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
       return transform(resultSet);
     } finally {
       context.stop();
     }
   }

-  public E sync(UnitOfWork uow) {// throws TimeoutException {
-    if (uow == null)
-      return sync();
+  public E sync(UnitOfWork uow) throws TimeoutException {
+    if (uow == null) return sync();

     final Timer.Context context = requestLatency.time();
     try {
-      ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
-          showValues, true);
+      ResultSet resultSet =
+          execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
       E result = transform(resultSet);
       return result;
     } finally {
@@ -66,24 +64,28 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
   }

   public CompletableFuture<E> async() {
-    return CompletableFuture.<E>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+    return CompletableFuture.<E>supplyAsync(
+        () -> {
+          try {
+            return sync();
+          } catch (TimeoutException ex) {
+            throw new CompletionException(ex);
+          }
+        });
   }

   public CompletableFuture<E> async(UnitOfWork uow) {
-    if (uow == null)
-      return async();
-    return CompletableFuture.<E>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+    if (uow == null) return async();
+    CompletableFuture<E> f =
+        CompletableFuture.<E>supplyAsync(
+            () -> {
+              try {
+                return sync();
+              } catch (TimeoutException ex) {
+                throw new CompletionException(ex);
+              }
+            });
+    uow.addFuture(f);
+    return f;
   }
 }
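The commented-out timeout handling above is now real: sync() declares the checked TimeoutException, and async() rethrows the same failure wrapped in a CompletionException while also registering the future on the UnitOfWork. A calling-convention sketch (`op` stands for any concrete AbstractOperation; the run method is illustrative):

// Sketch only: the revised error contract for sync()/async().
static <E> void run(AbstractOperation<E, ?> op, UnitOfWork uow) {
  try {
    E result = op.sync(uow); // may block up to queryExecutionTimeout queryTimeoutUnits
  } catch (TimeoutException e) {
    // the statement did not complete within the configured window
  }
  op.async(uow) // also registered with the UnitOfWork via uow.addFuture(f)
      .whenComplete(
          (result, ex) -> {
            // ex, when non-null, is a CompletionException whose cause is the TimeoutException
          });
}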
src/main/java/net/helenus/core/operation/AbstractOptionalOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,9 +16,7 @@
  */
 package net.helenus.core.operation;

-import java.util.List;
-import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
+import static net.helenus.core.HelenusSession.deleted;

 import com.codahale.metrics.Timer;
 import com.datastax.driver.core.PreparedStatement;
@@ -26,15 +25,23 @@ import com.google.common.base.Function;
 import com.google.common.base.Stopwatch;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-
+import java.io.Serializable;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.TimeoutException;
 import net.helenus.core.AbstractSessionOperations;
+import net.helenus.core.Helenus;
 import net.helenus.core.UnitOfWork;
+import net.helenus.core.cache.CacheUtil;
 import net.helenus.core.cache.Facet;
+import net.helenus.mapping.MappingUtil;
+import net.helenus.support.Fun;
+import org.apache.commons.lang3.SerializationUtils;

 public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
-    extends
-    AbstractStatementOperation<E, O> {
+    extends AbstractStatementOperation<E, O> {

   public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
     super(sessionOperations);
@@ -48,7 +55,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
   public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
     final O _this = (O) this;
-    return Futures.transform(prepareStatementAsync(),
+    return Futures.transform(
+        prepareStatementAsync(),
         new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
           @Override
           public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
@@ -57,36 +65,58 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
         });
   }

-  public Optional<E> sync() {// throws TimeoutException {
+  public Optional<E> sync() throws TimeoutException {
     final Timer.Context context = requestLatency.time();
     try {
       Optional<E> result = Optional.empty();
       E cacheResult = null;
-      boolean updateCache = isSessionCacheable();
+      boolean updateCache = isSessionCacheable() && !ignoreCache();

-      if (enableCache && isSessionCacheable()) {
+      if (updateCache) {
         List<Facet> facets = bindFacetValues();
+        if (facets != null && facets.size() > 0) {
+          if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
             String tableName = CacheUtil.schemaName(facets);
             cacheResult = (E) sessionOps.checkCache(tableName, facets);
             if (cacheResult != null) {
               result = Optional.of(cacheResult);
               updateCache = false;
               sessionCacheHits.mark();
               cacheHits.mark();
             } else {
               sessionCacheMiss.mark();
               cacheMiss.mark();
             }
+          }
+        } else {
+          //TODO(gburd): look in statement cache for results
+        }
       }

       if (!result.isPresent()) {
         // Formulate the query and execute it against the Cassandra cluster.
-        ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
-            queryTimeoutUnits, showValues, false);
+        ResultSet resultSet =
+            this.execute(
+                sessionOps,
+                null,
+                queryExecutionTimeout,
+                queryTimeoutUnits,
+                showValues,
+                isSessionCacheable());

         // Transform the query result set into the desired shape.
         result = transform(resultSet);
       }

       if (updateCache && result.isPresent()) {
+        E r = result.get();
+        Class<?> resultClass = r.getClass();
+        if (!(resultClass.getEnclosingClass() != null
+            && resultClass.getEnclosingClass() == Fun.class)) {
           List<Facet> facets = getFacets();
           if (facets != null && facets.size() > 1) {
-            sessionOps.updateCache(result.get(), facets);
+            sessionOps.updateCache(r, facets);
           }
+        }
       }
       return result;
@@ -95,51 +125,100 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
     }
   }

-  public Optional<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
-    if (uow == null)
-      return sync();
+  public Optional<E> sync(UnitOfWork uow) throws TimeoutException {
+    if (uow == null) return sync();

     final Timer.Context context = requestLatency.time();
     try {

       Optional<E> result = Optional.empty();
-      E cacheResult = null;
-      boolean updateCache = true;
+      E cachedResult = null;
+      final boolean updateCache;

-      if (enableCache) {
-        Stopwatch timer = uow.getCacheLookupTimer();
-        timer.start();
+      if (!ignoreCache()) {
+        Stopwatch timer = Stopwatch.createStarted();
+        try {
           List<Facet> facets = bindFacetValues();
-          cacheResult = checkCache(uow, facets);
-          if (cacheResult != null) {
-            result = Optional.of(cacheResult);
+          if (facets != null && facets.size() > 0) {
+            if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
+              cachedResult = checkCache(uow, facets);
+              if (cachedResult != null) {
+                updateCache = false;
+                result = Optional.of(cachedResult);
                 uowCacheHits.mark();
                 cacheHits.mark();
+                uow.recordCacheAndDatabaseOperationCount(1, 0);
               } else {
                 uowCacheMiss.mark();
                 if (isSessionCacheable()) {
                   String tableName = CacheUtil.schemaName(facets);
-                  cacheResult = (E) sessionOps.checkCache(tableName, facets);
-                  if (cacheResult != null) {
-                    result = Optional.of(cacheResult);
+                  cachedResult = (E) sessionOps.checkCache(tableName, facets);
+                  if (cachedResult != null) {
+                    Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
+                    if (Helenus.entity(iface).isDraftable()) {
+                      result = Optional.of(cachedResult);
+                    } else {
+                      result =
+                          Optional.of(
+                              (E)
+                                  SerializationUtils.<Serializable>clone(
+                                      (Serializable) cachedResult));
+                    }
+                    updateCache = false;
                     sessionCacheHits.mark();
                     cacheHits.mark();
+                    uow.recordCacheAndDatabaseOperationCount(1, 0);
                   } else {
+                    updateCache = true;
                     sessionCacheMiss.mark();
                     cacheMiss.mark();
+                    uow.recordCacheAndDatabaseOperationCount(-1, 0);
                   }
+                } else {
+                  updateCache = false;
                 }
               }
+            } else {
+              //TODO(gburd): look in statement cache for results
+              updateCache = false; //true;
+              cacheMiss.mark();
+              uow.recordCacheAndDatabaseOperationCount(-1, 0);
+            }
+          } else {
+            updateCache = false;
+          }
+        } finally {
+          timer.stop();
+          uow.addCacheLookupTime(timer);
+        }
+      } else {
+        updateCache = false;
+      }

-      if (!result.isPresent()) {
+      // Check to see if we fetched the object from the cache
+      if (result.isPresent()) {
+        // If we fetched the `deleted` object then the result is null (really
+        // Optional.empty()).
+        if (result.get() == deleted) {
+          result = Optional.empty();
+        }
+      } else {

         // Formulate the query and execute it against the Cassandra cluster.
-        ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
-            showValues, true);
+        ResultSet resultSet =
+            execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);

         // Transform the query result set into the desired shape.
         result = transform(resultSet);
       }

       // If we have a result, it wasn't from the UOW cache, and we're caching things
-      // then we
-      // need to put this result into the cache for future requests to find.
+      // then we need to put this result into the cache for future requests to find.
       if (updateCache && result.isPresent()) {
-        updateCache(uow, result.get(), getFacets());
+        E r = result.get();
+        if (!(r instanceof Fun) && r != deleted) {
+          cacheUpdate(uow, r, getFacets());
+        }
       }

       return result;
@@ -149,24 +228,28 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
     }
   }

   public CompletableFuture<Optional<E>> async() {
-    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+    return CompletableFuture.<Optional<E>>supplyAsync(
+        () -> {
+          try {
+            return sync();
+          } catch (TimeoutException ex) {
+            throw new CompletionException(ex);
+          }
+        });
   }

-  public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) {
-    if (uow == null)
-      return async();
-    return CompletableFuture.<Optional<E>>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+  public CompletableFuture<Optional<E>> async(UnitOfWork uow) {
+    if (uow == null) return async();
+    CompletableFuture<Optional<E>> f =
+        CompletableFuture.<Optional<E>>supplyAsync(
+            () -> {
+              try {
+                return sync();
+              } catch (TimeoutException ex) {
+                throw new CompletionException(ex);
+              }
+            });
+    uow.addFuture(f);
+    return f;
  }
 }
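Read-path sketch for the restructured sync(UnitOfWork): the first call misses the UOW cache, may hit the session cache (defensively cloning entities that are not draftable, since the cached instance is shared), and otherwise queries Cassandra; a repeat call is served from the UOW cache. The `session.begin()`, `uow.commit()`, and Widget names are assumptions from the surrounding API, not shown in this diff:

// Sketch only: repeated reads inside one UnitOfWork.
UnitOfWork uow = session.begin();
Optional<Widget> first = select.sync(uow);  // cache miss -> Cassandra, then cacheUpdate(...)
Optional<Widget> again = select.sync(uow);  // UOW cache hit, recorded as counts (1, 0)
uow.commit();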
src/main/java/net/helenus/core/operation/AbstractStatementOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,15 +16,6 @@
  */
 package net.helenus.core.operation;

-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.datastax.driver.core.ConsistencyLevel;
 import com.datastax.driver.core.PreparedStatement;
 import com.datastax.driver.core.RegularStatement;
@@ -34,33 +26,30 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
 import com.datastax.driver.core.policies.RetryPolicy;
 import com.datastax.driver.core.querybuilder.BuiltStatement;
 import com.google.common.util.concurrent.ListenableFuture;
-
-import brave.Tracer;
-import brave.propagation.TraceContext;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
 import net.helenus.core.AbstractSessionOperations;
 import net.helenus.core.UnitOfWork;
 import net.helenus.core.cache.Facet;
 import net.helenus.core.cache.UnboundFacet;
 import net.helenus.core.reflect.MapExportable;
 import net.helenus.mapping.HelenusProperty;
 import net.helenus.mapping.value.BeanColumnValueProvider;
 import net.helenus.support.HelenusException;

-public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> extends Operation<E> {
-
-  private static final Logger LOG = LoggerFactory.getLogger(AbstractStatementOperation.class);
-
-  protected boolean enableCache = true;
-  protected boolean showValues = true;
-  protected TraceContext traceContext;
-  long queryExecutionTimeout = 10;
-  TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
+public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
+    extends Operation<E> {
+  private boolean ignoreCache = false;
   private ConsistencyLevel consistencyLevel;
   private ConsistencyLevel serialConsistencyLevel;
   private RetryPolicy retryPolicy;
-  private boolean idempotent = false;
   private boolean enableTracing = false;
   private long[] defaultTimestamp = null;
   private int[] fetchSize = null;
+  protected boolean idempotent = false;

   public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
     super(sessionOperations);
@@ -70,13 +59,13 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
   public abstract Statement buildStatement(boolean cached);

-  public O ignoreCache(boolean enabled) {
-    enableCache = enabled;
+  public O uncached(boolean enabled) {
+    ignoreCache = !enabled;
     return (O) this;
   }

-  public O ignoreCache() {
-    enableCache = true;
+  public O uncached() {
+    ignoreCache = true;
     return (O) this;
   }

@@ -257,22 +246,16 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
       statement.setFetchSize(fetchSize[0]);
     }

-    if (idempotent) {
+    if (isIdempotentOperation()) {
       statement.setIdempotent(true);
     }

     return statement;
   }

-  public O zipkinContext(TraceContext traceContext) {
-    if (traceContext != null) {
-      Tracer tracer = this.sessionOps.getZipkinTracer();
-      if (tracer != null) {
-        this.traceContext = traceContext;
-      }
-    }
-
-    return (O) this;
+  @Override
+  protected boolean isIdempotentOperation() {
+    return idempotent;
   }

   public Statement statement() {
@@ -281,8 +264,7 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
   public String cql() {
     Statement statement = buildStatement(false);
-    if (statement == null)
-      return "";
+    if (statement == null) return "";
     if (statement instanceof BuiltStatement) {
       BuiltStatement buildStatement = (BuiltStatement) statement;
       return buildStatement.setForceNoValues(true).getQueryString();
@@ -319,50 +301,56 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
     throw new HelenusException("only RegularStatements can be prepared");
   }

+  protected boolean ignoreCache() {
+    return ignoreCache;
+  }
+
-  protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) {
+  protected E checkCache(UnitOfWork uow, List<Facet> facets) {
     E result = null;
     Optional<Object> optionalCachedResult = Optional.empty();

     if (!facets.isEmpty()) {
       optionalCachedResult = uow.cacheLookup(facets);
       if (optionalCachedResult.isPresent()) {
-        uowCacheHits.mark();
-        LOG.info("UnitOfWork({}) cache hit using facets", uow.hashCode());
         result = (E) optionalCachedResult.get();
       }
     }

-    if (result == null) {
-      uowCacheMiss.mark();
-      LOG.info("UnitOfWork({}) cache miss", uow.hashCode());
-    }
-
     return result;
   }

-  protected void updateCache(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) {
+  protected Object cacheUpdate(UnitOfWork uow, E pojo, List<Facet> identifyingFacets) {
     List<Facet> facets = new ArrayList<>();
-    Map<String, Object> valueMap = pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
+    Map<String, Object> valueMap =
+        pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;

     for (Facet facet : identifyingFacets) {
       if (facet instanceof UnboundFacet) {
         UnboundFacet unboundFacet = (UnboundFacet) facet;
         UnboundFacet.Binder binder = unboundFacet.binder();
-        unboundFacet.getProperties().forEach(prop -> {
-          if (valueMap == null) {
-            Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
-            binder.setValueForProperty(prop, value.toString());
-          } else {
-            binder.setValueForProperty(prop, valueMap.get(prop.getPropertyName()).toString());
-          }
-        });
+        for (HelenusProperty prop : unboundFacet.getProperties()) {
+          Object value;
+          if (valueMap == null) {
+            value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
+            if (value != null) {
+              binder.setValueForProperty(prop, value.toString());
+            }
+          } else {
+            value = valueMap.get(prop.getPropertyName());
+            if (value != null) {
+              binder.setValueForProperty(prop, value.toString());
+            }
+          }
+        }
         if (binder.isBound()) {
           facets.add(binder.bind());
         }
       } else {
         facets.add(facet);
       }
     }

     // Cache the value (pojo), the statement key, and the fully bound facets.
-    uow.cacheUpdate(pojo, facets);
+    return uow.cacheUpdate(pojo, facets);
   }
 }
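The cache toggle is renamed and inverted: ignoreCache(boolean)/ignoreCache() on an enableCache flag become uncached(boolean)/uncached() over the new ignoreCache flag, where uncached(enabled) stores the negation of its argument. A short sketch, grounded directly in the setters above (`op` is any statement operation):

// Sketch only: the uncached() toggle.
op.uncached().sync();      // ignoreCache = true: skip UOW and session cache lookups
op.uncached(true).sync();  // ignoreCache = false: the default, cache-aware behavior
op.uncached(false).sync(); // ignoreCache = true: same effect as uncached()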
src/main/java/net/helenus/core/operation/AbstractStreamOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,10 +16,7 @@
  */
 package net.helenus.core.operation;

-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.CompletableFuture;
-import java.util.stream.Stream;
+import static net.helenus.core.HelenusSession.deleted;

 import com.codahale.metrics.Timer;
 import com.datastax.driver.core.PreparedStatement;
@@ -27,15 +25,24 @@ import com.google.common.base.Function;
 import com.google.common.base.Stopwatch;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Stream;
 import net.helenus.core.AbstractSessionOperations;
+import net.helenus.core.Helenus;
 import net.helenus.core.UnitOfWork;
+import net.helenus.core.cache.CacheUtil;
 import net.helenus.core.cache.Facet;
+import net.helenus.mapping.MappingUtil;
+import net.helenus.support.Fun;
+import org.apache.commons.lang3.SerializationUtils;

 public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
-    extends
-    AbstractStatementOperation<E, O> {
+    extends AbstractStatementOperation<E, O> {

   public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
     super(sessionOperations);
@@ -49,7 +56,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
   public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
     final O _this = (O) this;
-    return Futures.transform(prepareStatementAsync(),
+    return Futures.transform(
+        prepareStatementAsync(),
         new Function<PreparedStatement, PreparedStreamOperation<E>>() {
           @Override
           public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
@@ -58,27 +66,44 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
         });
   }

-  public Stream<E> sync() {// throws TimeoutException {
+  public Stream<E> sync() throws TimeoutException {
    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cacheResult = null;
      boolean updateCache = isSessionCacheable();

-      if (enableCache && isSessionCacheable()) {
+      if (!ignoreCache() && isSessionCacheable()) {
        List<Facet> facets = bindFacetValues();
+        if (facets != null && facets.size() > 0) {
+          if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
            String tableName = CacheUtil.schemaName(facets);
            cacheResult = (E) sessionOps.checkCache(tableName, facets);
            if (cacheResult != null) {
              resultStream = Stream.of(cacheResult);
              updateCache = false;
              sessionCacheHits.mark();
              cacheHits.mark();
            } else {
              sessionCacheMiss.mark();
              cacheMiss.mark();
            }
+          } else {
+            //TODO(gburd): look in statement cache for results
+          }
+        }
      }

      if (resultStream == null) {
        // Formulate the query and execute it against the Cassandra cluster.
-        ResultSet resultSet = this.execute(sessionOps, null, traceContext, queryExecutionTimeout,
-            queryTimeoutUnits, showValues, false);
+        ResultSet resultSet =
+            this.execute(
+                sessionOps,
+                null,
+                queryExecutionTimeout,
+                queryTimeoutUnits,
+                showValues,
+                isSessionCacheable());

        // Transform the query result set into the desired shape.
        resultStream = transform(resultSet);
@@ -88,8 +113,13 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
        List<Facet> facets = getFacets();
        if (facets != null && facets.size() > 1) {
          List<E> again = new ArrayList<>();
-          resultStream.forEach(result -> {
+          resultStream.forEach(
+              result -> {
+                Class<?> resultClass = result.getClass();
+                if (!(resultClass.getEnclosingClass() != null
+                    && resultClass.getEnclosingClass() == Fun.class)) {
                  sessionOps.updateCache(result, facets);
+                }
                again.add(result);
              });
          resultStream = again.stream();
@@ -102,45 +132,100 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
    }
  }

-  public Stream<E> sync(UnitOfWork<?> uow) {// throws TimeoutException {
-    if (uow == null)
-      return sync();
+  public Stream<E> sync(UnitOfWork uow) throws TimeoutException {
+    if (uow == null) return sync();

    final Timer.Context context = requestLatency.time();
    try {
      Stream<E> resultStream = null;
      E cachedResult = null;
-      boolean updateCache = true;
+      final boolean updateCache;

-      if (enableCache) {
-        Stopwatch timer = uow.getCacheLookupTimer();
-        timer.start();
+      if (!ignoreCache()) {
+        Stopwatch timer = Stopwatch.createStarted();
+        try {
          List<Facet> facets = bindFacetValues();
+          if (facets != null && facets.size() > 0) {
+            if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
              cachedResult = checkCache(uow, facets);
              if (cachedResult != null) {
+                updateCache = false;
                resultStream = Stream.of(cachedResult);
                uowCacheHits.mark();
                cacheHits.mark();
+                uow.recordCacheAndDatabaseOperationCount(1, 0);
              } else {
                uowCacheMiss.mark();
+                if (isSessionCacheable()) {
+                  String tableName = CacheUtil.schemaName(facets);
+                  cachedResult = (E) sessionOps.checkCache(tableName, facets);
+                  if (cachedResult != null) {
+                    Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
+                    E result = null;
+                    if (Helenus.entity(iface).isDraftable()) {
+                      result = cachedResult;
+                    } else {
+                      result =
+                          (E) SerializationUtils.<Serializable>clone((Serializable) cachedResult);
+                    }
+                    updateCache = false;
+                    resultStream = Stream.of(result);
+                    sessionCacheHits.mark();
+                    cacheHits.mark();
+                    uow.recordCacheAndDatabaseOperationCount(1, 0);
+                  } else {
+                    updateCache = true;
+                    sessionCacheMiss.mark();
+                    cacheMiss.mark();
+                    uow.recordCacheAndDatabaseOperationCount(-1, 0);
+                  }
+                } else {
+                  updateCache = false;
+                }
              }
+            } else {
+              //TODO(gburd): look in statement cache for results
+              updateCache = false; //true;
+              cacheMiss.mark();
+              uow.recordCacheAndDatabaseOperationCount(-1, 0);
+            }
+          } else {
+            updateCache = false;
+          }
+        } finally {
+          timer.stop();
+          uow.addCacheLookupTime(timer);
+        }
      } else {
        updateCache = false;
      }

      // Check to see if we fetched the object from the cache
      if (resultStream == null) {
-        ResultSet resultSet = execute(sessionOps, uow, traceContext, queryExecutionTimeout, queryTimeoutUnits,
-            showValues, true);
+        ResultSet resultSet =
+            execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
        resultStream = transform(resultSet);
      }

      // If we have a result and we're caching then we need to put it into the cache
      // for future requests to find.
-      if (updateCache && resultStream != null) {
+      if (resultStream != null) {
+        if (updateCache) {
          List<E> again = new ArrayList<>();
          List<Facet> facets = getFacets();
-          resultStream.forEach(result -> {
-            updateCache(uow, result, facets);
+          resultStream.forEach(
+              result -> {
+                Class<?> resultClass = result.getClass();
+                if (result != deleted
+                    && !(resultClass.getEnclosingClass() != null
+                        && resultClass.getEnclosingClass() == Fun.class)) {
+                  result = (E) cacheUpdate(uow, result, facets);
+                }
                again.add(result);
              });
          resultStream = again.stream();
+        }
      }

      return resultStream;
    } finally {
@@ -149,24 +234,28 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
    }
  }

  public CompletableFuture<Stream<E>> async() {
-    return CompletableFuture.<Stream<E>>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+    return CompletableFuture.<Stream<E>>supplyAsync(
+        () -> {
+          try {
+            return sync();
+          } catch (TimeoutException ex) {
+            throw new CompletionException(ex);
+          }
+        });
  }

-  public CompletableFuture<Stream<E>> async(UnitOfWork<?> uow) {
-    if (uow == null)
-      return async();
-    return CompletableFuture.<Stream<E>>supplyAsync(() -> {
-      // try {
-      return sync();
-      // } catch (TimeoutException ex) {
-      //   throw new CompletionException(ex);
-      // }
-    });
+  public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
+    if (uow == null) return async();
+    CompletableFuture<Stream<E>> f =
+        CompletableFuture.<Stream<E>>supplyAsync(
+            () -> {
+              try {
+                return sync();
+              } catch (TimeoutException ex) {
+                throw new CompletionException(ex);
+              }
+            });
+    uow.addFuture(f);
+    return f;
  }
 }
src/main/java/net/helenus/core/operation/BatchOperation.java (new file, 140 lines)
@@ -0,0 +1,140 @@
/*
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.operation;

import com.codahale.metrics.Timer;
import com.datastax.driver.core.AtomicMonotonicTimestampGenerator;
import com.datastax.driver.core.BatchStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.TimestampGenerator;
import com.google.common.base.Stopwatch;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.support.HelenusException;

public class BatchOperation extends Operation<Long> {
  //TODO(gburd): find the way to get the driver's timestamp generator
  private static final TimestampGenerator timestampGenerator =
      new AtomicMonotonicTimestampGenerator();

  private final BatchStatement batch;
  private List<AbstractOperation<?, ?>> operations = new ArrayList<AbstractOperation<?, ?>>();
  private boolean logged = true;

  public BatchOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
    batch = new BatchStatement();
  }

  public void add(AbstractOperation<?, ?> operation) {
    operations.add(operation);
  }

  @Override
  public BatchStatement buildStatement(boolean cached) {
    batch.addAll(
        operations.stream().map(o -> o.buildStatement(cached)).collect(Collectors.toList()));
    batch.setConsistencyLevel(sessionOps.getDefaultConsistencyLevel());
    return batch;
  }

  public BatchOperation logged() {
    logged = true;
    return this;
  }

  public BatchOperation setLogged(boolean logStatements) {
    logged = logStatements;
    return this;
  }

  public Long sync() {
    if (operations.size() == 0) return 0L;
    final Timer.Context context = requestLatency.time();
    try {
      batch.setDefaultTimestamp(timestampGenerator.next());
      ResultSet resultSet =
          this.execute(
              sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
      if (!resultSet.wasApplied()) {
        throw new HelenusException("Failed to apply batch.");
      }
    } catch (TimeoutException e) {
      throw new HelenusException(e);
    } finally {
      context.stop();
    }
    return batch.getDefaultTimestamp();
  }

  public Long sync(UnitOfWork uow) {
    if (operations.size() == 0) return 0L;
    if (uow == null) return sync();

    final Timer.Context context = requestLatency.time();
    final Stopwatch timer = Stopwatch.createStarted();
    try {
      uow.recordCacheAndDatabaseOperationCount(0, 1);
      batch.setDefaultTimestamp(timestampGenerator.next());
      ResultSet resultSet =
          this.execute(
              sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
      if (!resultSet.wasApplied()) {
        throw new HelenusException("Failed to apply batch.");
      }
    } catch (TimeoutException e) {
      throw new HelenusException(e);
    } finally {
      context.stop();
      timer.stop();
    }
    uow.addDatabaseTime("Cassandra", timer);
    return batch.getDefaultTimestamp();
  }

  public void addAll(BatchOperation batch) {
    batch.operations.forEach(o -> this.operations.add(o));
  }

  public String toString() {
    return toString(true); //TODO(gburd): sessionOps.showQueryValues()
  }

  public String toString(boolean showValues) {
    StringBuilder s = new StringBuilder();
    s.append("BEGIN ");
    if (!logged) {
      s.append("UNLOGGED ");
    }
    s.append("BATCH ");

    if (batch.getDefaultTimestamp() > -9223372036854775808L) {
      s.append("USING TIMESTAMP ").append(String.valueOf(batch.getDefaultTimestamp())).append(" ");
    }
    s.append(
        operations
            .stream()
            .map(o -> Operation.queryString(o.buildStatement(showValues), showValues))
            .collect(Collectors.joining(" ")));
    s.append(" APPLY BATCH;");
    return s.toString();
  }
}
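Usage sketch for the new BatchOperation; the two insert operations and the `uow` variable are hypothetical stand-ins:

// Sketch only: grouping operations into one Cassandra batch.
BatchOperation batch = new BatchOperation(sessionOps);
batch.add(insertAccountOp);
batch.add(insertAuditOp);
batch.setLogged(false);           // renders BEGIN UNLOGGED BATCH ... APPLY BATCH;
Long writeTime = batch.sync(uow); // returns the batch's USING TIMESTAMP value

Because every statement shares the single default timestamp drawn from the AtomicMonotonicTimestampGenerator, the whole batch lands at one write time, which is also the value sync() returns.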
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
src/main/java/net/helenus/core/operation/BoundOptionalOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,18 +16,19 @@
  */
 package net.helenus.core.operation;

-import java.util.Optional;
-
 import com.datastax.driver.core.BoundStatement;
 import com.datastax.driver.core.ResultSet;
 import com.datastax.driver.core.Statement;
+import java.util.Optional;

-public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
+public final class BoundOptionalOperation<E>
+    extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {

   private final BoundStatement boundStatement;
   private final AbstractOptionalOperation<E, ?> delegate;

-  public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
+  public BoundOptionalOperation(
+      BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
     super(operation.sessionOps);
     this.boundStatement = boundStatement;
     this.delegate = operation;
src/main/java/net/helenus/core/operation/BoundStreamOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,21 +16,21 @@
  */
 package net.helenus.core.operation;

-import java.util.List;
-import java.util.stream.Stream;
-
 import com.datastax.driver.core.BoundStatement;
 import com.datastax.driver.core.ResultSet;
 import com.datastax.driver.core.Statement;
+import java.util.List;
+import java.util.stream.Stream;
 import net.helenus.core.cache.Facet;

-public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
+public final class BoundStreamOperation<E>
+    extends AbstractStreamOperation<E, BoundStreamOperation<E>> {

   private final BoundStatement boundStatement;
   private final AbstractStreamOperation<E, ?> delegate;

-  public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
+  public BoundStreamOperation(
+      BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
     super(operation.sessionOps);
     this.boundStatement = boundStatement;
     this.delegate = operation;
src/main/java/net/helenus/core/operation/CountOperation.java
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -20,7 +21,6 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
 import com.datastax.driver.core.querybuilder.QueryBuilder;
 import com.datastax.driver.core.querybuilder.Select;
 import com.datastax.driver.core.querybuilder.Select.Where;
-
 import net.helenus.core.AbstractSessionOperations;
 import net.helenus.core.Filter;
 import net.helenus.core.reflect.HelenusPropertyNode;
@@ -38,6 +38,7 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
   public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
     super(sessionOperations);
     this.entity = entity;
+    //TODO(gburd): cache SELECT COUNT results within the scope of a UOW
   }

   @Override
@@ -74,8 +75,11 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
       if (entity == null) {
         entity = p.getEntity();
       } else if (entity != p.getEntity()) {
-        throw new HelenusMappingException("you can count columns only in single entity "
-            + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
+        throw new HelenusMappingException(
+            "you can count columns only in single entity "
+                + entity.getMappingInterface()
+                + " or "
+                + p.getEntity().getMappingInterface());
       }
     }
   }
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -20,11 +21,15 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import java.util.List;
import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;

public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {

@@ -118,8 +123,53 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can delete rows only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
      throw new HelenusMappingException(
          "you can delete rows only in single entity "
              + entity.getMappingInterface()
              + " or "
              + p.getEntity().getMappingInterface());
    }
  }

  public List<Facet> bindFacetValues() {
    return bindFacetValues(getFacets());
  }

  protected boolean isIdempotentOperation() {
    return true;
  }

  @Override
  public ResultSet sync() throws TimeoutException {
    ResultSet result = super.sync();
    if (entity.isCacheable()) {
      sessionOps.cacheEvict(bindFacetValues());
    }
    return result;
  }

  @Override
  public ResultSet sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      return sync();
    }
    ResultSet result = super.sync(uow);
    uow.cacheEvict(bindFacetValues());
    return result;
  }

  public ResultSet batch(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      throw new HelenusException("UnitOfWork cannot be null when batching operations.");
    }

    uow.cacheEvict(bindFacetValues());
    uow.batch(this);
    return null;
  }

  @Override
  public List<Facet> getFacets() {
    return entity.getFacets();
  }
}
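Note: DeleteOperation now participates in caching: it declares itself idempotent (safe for the driver to retry), and both sync(UnitOfWork) and batch(UnitOfWork) evict the row's bound facets before the statement runs so stale entries cannot be read back. A hypothetical usage sketch (the begin()/delete() shapes are assumptions, not part of this changeset):

    // Hypothetical usage sketch -- session.begin()/delete() are assumed shapes.
    // batch(uow) evicts the row's facets from the UOW cache first, then defers
    // the DELETE; reads later in the same UOW won't see the dead row via cache.
    UnitOfWork uow = session.begin();
    session.delete(widget::id, Query.eq(id)).batch(uow);
    // ... more operations enqueued in this unit of work ...
    uow.commit(); // the batched DELETE executes here
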
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,21 +16,24 @@
 */
package net.helenus.core.operation;

import java.util.*;
import java.util.function.Function;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.stream.Collectors;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;

@@ -40,38 +44,63 @@ import net.helenus.support.HelenusMappingException;

public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {

  private HelenusEntity entity;

  private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
  private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
      new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
  private final T pojo;
  private final Class<?> resultType;
  private final Set<String> readSet;
  private HelenusEntity entity;
  private boolean ifNotExists;

  private int[] ttl;
  private long[] timestamp;
  private long writeTime = 0L;

  public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.readSet = null;
    this.ifNotExists = ifNotExists;
    this.resultType = ResultSet.class;
  }

  public InsertOperation(AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
  public InsertOperation(
      AbstractSessionOperations sessionOperations,
      HelenusEntity entity,
      Class<?> resultType,
      boolean ifNotExists) {
    super(sessionOperations);

    this.ifNotExists = ifNotExists;
    this.pojo = null;
    this.readSet = null;
    this.ifNotExists = ifNotExists;
    this.resultType = resultType;
    this.entity = entity;
  }

  public InsertOperation(
      AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
    super(sessionOperations);

    this.pojo = null;
    this.readSet = null;
    this.ifNotExists = ifNotExists;
    this.resultType = resultType;
  }

  public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, T pojo,
      Set<String> mutations, boolean ifNotExists) {
  public InsertOperation(
      AbstractSessionOperations sessionOperations,
      HelenusEntity entity,
      T pojo,
      Set<String> mutations,
      Set<String> read,
      boolean ifNotExists) {
    super(sessionOperations);

    this.entity = entity;
    this.pojo = pojo;
    this.readSet = read;
    this.entity = entity;
    this.ifNotExists = ifNotExists;
    this.resultType = entity.getMappingInterface();

@@ -82,11 +111,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
      boolean addProp = false;

      switch (prop.getColumnType()) {
        case PARTITION_KEY :
        case CLUSTERING_COLUMN :
        case PARTITION_KEY:
        case CLUSTERING_COLUMN:
          addProp = true;
          break;
        default :
        default:
          addProp = (keys == null || keys.contains(prop.getPropertyName()));
      }

@@ -130,11 +159,34 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {

  @Override
  public BuiltStatement buildStatement(boolean cached) {
    List<HelenusEntity> entities =
        values
            .stream()
            .map(t -> t._1.getProperty().getEntity())
            .distinct()
            .collect(Collectors.toList());
    if (entities.size() != 1) {
      throw new HelenusMappingException(
          "you can insert only single entity at a time, found: "
              + entities
                  .stream()
                  .map(e -> e.getMappingInterface().toString())
                  .collect(Collectors.joining(", ")));
    }
    HelenusEntity entity = entities.get(0);
    if (this.entity != null) {
      if (this.entity != entity) {
        throw new HelenusMappingException(
            "you can insert only single entity at a time, found: "
                + this.entity.getMappingInterface().toString()
                + ", "
                + entity.getMappingInterface().toString());
      }
    } else {
      this.entity = entity;
    }

    values.forEach(t -> addPropertyNode(t._1));

    if (values.isEmpty())
      return null;
    if (values.isEmpty()) return null;

    if (entity == null) {
      throw new HelenusMappingException("unknown entity");

@@ -146,10 +198,13 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
      insert.ifNotExists();
    }

    values.forEach(t -> {
    values.forEach(
        t -> {
          insert.value(t._1.getColumnName(), t._2);
        });

    //TODO(gburd): IF NOT EXISTS when @Constraints.Relationship is 1:1 or 1:m

    if (this.ttl != null) {
      insert.using(QueryBuilder.ttl(this.ttl[0]));
    }

@@ -160,12 +215,9 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
    return insert;
  }

  @Override
  public T transform(ResultSet resultSet) {
    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
  private T newInstance(Class<?> iface) {
    if (values.size() > 0) {
      boolean immutable = iface.isAssignableFrom(Drafted.class);
      boolean immutable = entity.isDraftable();
      Collection<HelenusProperty> properties = entity.getOrderedProperties();
      Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());

@@ -178,8 +230,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
        if (backingMap.containsKey(key)) {
          // Some values may need to be converted (e.g. from String to Enum). This is done
          // within the BeanColumnValueProvider below.
          Optional<Function<Object, Object>> converter = prop
              .getReadConverter(sessionOps.getSessionRepository());
          Optional<Function<Object, Object>> converter =
              prop.getReadConverter(sessionOps.getSessionRepository());
          if (converter.isPresent()) {
            backingMap.put(key, converter.get().apply(backingMap.get(key)));
          }

@@ -187,8 +239,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
          // If we started this operation with an instance of this type, use values from
          // that.
          if (pojo != null) {
            backingMap.put(key,
                BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
            backingMap.put(
                key, BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, immutable));
          } else {
            // Otherwise we'll use default values for the property type if available.
            Class<?> propType = prop.getJavaType();

@@ -206,11 +258,24 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
      // Lastly, create a new proxy object for the entity and return the new instance.
      return (T) Helenus.map(iface, backingMap);
    }
    // Oddly, this insert didn't change any value so simply return the pojo.
    // TODO(gburd): this pojo is the result of a Draft.build() call which will not
    // preserve object identity (o1 == o2), ... fix me.
    return null;
  }

  @Override
  public T transform(ResultSet resultSet) {
    if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
      throw new HelenusException("Statement was not applied due to consistency constraints");
    }

    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
      T o = newInstance(iface);
      if (o == null) {
        // Oddly, this insert didn't change anything so simply return the pojo.
        return (T) pojo;
      }
      return o;
    }
    return (T) resultSet;
  }

@@ -226,25 +291,134 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
    return this;
  }

  private void addPropertyNode(HelenusPropertyNode p) {
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
          + " or " + p.getEntity().getMappingInterface());
  protected void adjustTtlAndWriteTime(MapExportable pojo) {
    if (ttl != null || writeTime != 0L) {
      List<String> columnNames =
          values
              .stream()
              .map(t -> t._1.getProperty())
              .filter(
                  prop -> {
                    switch (prop.getColumnType()) {
                      case PARTITION_KEY:
                      case CLUSTERING_COLUMN:
                        return false;
                      default:
                        return true;
                    }
                  })
              .map(prop -> prop.getColumnName().toCql(false))
              .collect(Collectors.toList());

      if (columnNames.size() > 0) {
        if (ttl != null) {
          columnNames.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
        }
        if (writeTime != 0L) {
          columnNames.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
        }
      }
    }
  }

  @Override
  public T sync(UnitOfWork uow) {// throws TimeoutException {
  protected boolean isIdempotentOperation() {
    return values.stream().map(v -> v._1.getProperty()).allMatch(prop -> prop.isIdempotent())
        || super.isIdempotentOperation();
  }

  @Override
  public T sync() throws TimeoutException {
    T result = super.sync();
    if (entity.isCacheable() && result != null) {
      adjustTtlAndWriteTime((MapExportable) result);
      sessionOps.updateCache(result, bindFacetValues());
    }
    return result;
  }

  @Override
  public T sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      return sync();
    }
    T result = super.sync(uow);
    if (result != null && pojo != null && !(pojo == result) && pojo.equals(result)) {
      // To preserve object identity we need to find this object in cache
      // because it was unchanged by the INSERT but pojo in this case was
      // the result of a draft.build().
      T cachedValue = (T) uow.cacheLookup(bindFacetValues());
      if (cachedValue != null) {
        result = cachedValue;
      }
    }
    Class<?> iface = entity.getMappingInterface();
    if (resultType == iface) {
      updateCache(uow, result, entity.getFacets());
      if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
        adjustTtlAndWriteTime((MapExportable) result);
      }
      cacheUpdate(uow, result, bindFacetValues());
    }
    return result;
  }

  public T batch(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      throw new HelenusException("UnitOfWork cannot be null when batching operations.");
    }

    if (this.entity != null) {
      Class<?> iface = this.entity.getMappingInterface();
      if (resultType == iface) {
        final T result = (pojo == null) ? newInstance(iface) : pojo;
        if (result != null) {
          adjustTtlAndWriteTime((MapExportable) result);
          cacheUpdate(uow, result, bindFacetValues());
        }
        uow.batch(this);
        return (T) result;
      }
    }

    return sync(uow);
  }

  @Override
  public List<Facet> bindFacetValues() {
    List<Facet> facets = getFacets();
    if (facets == null || facets.size() == 0) {
      return new ArrayList<Facet>();
    }
    List<Facet> boundFacets = new ArrayList<>();
    Map<HelenusProperty, Object> valuesMap = new HashMap<>(values.size());
    values.forEach(t -> valuesMap.put(t._1.getProperty(), t._2));

    for (Facet facet : facets) {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        for (HelenusProperty prop : unboundFacet.getProperties()) {
          Object value = valuesMap.get(prop);
          if (value != null) {
            binder.setValueForProperty(prop, value.toString());
          }
        }
        if (binder.isBound()) {
          boundFacets.add(binder.bind());
        }
      } else {
        boundFacets.add(facet);
      }
    }
    return boundFacets;
  }

  @Override
  public List<Facet> getFacets() {
    if (entity != null) {
      return entity.getFacets();
    } else {
      return new ArrayList<Facet>();
    }
  }
}
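Note: two behavioral changes ride along with the reformat above: transform() now fails loudly when an ifNotExists() insert loses the lightweight-transaction race (wasApplied() == false), and the operation reports itself idempotent only when every written property is idempotent. A hypothetical usage sketch of the first change (the insert()/value() shapes are assumptions, not part of this changeset):

    // Hypothetical usage sketch -- insert()/value()/ifNotExists() shapes are
    // assumptions. A lost LWT race now surfaces as a HelenusException instead
    // of silently returning an entity that was never written.
    try {
      Widget w = session.<Widget>insert()
          .value(widget::id, id)
          .ifNotExists()
          .sync();
    } catch (HelenusException e) {
      // another writer created the row first; react here
    }
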
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,77 +16,177 @@
 */
package net.helenus.core.operation;

import java.util.List;
import java.util.concurrent.TimeUnit;

import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.base.Stopwatch;

import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet;
import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class Operation<E> {

  private static final Logger LOG = LoggerFactory.getLogger(Operation.class);

  protected final AbstractSessionOperations sessionOps;
  protected boolean showValues;
  protected long queryExecutionTimeout = 10;
  protected TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
  protected final Meter uowCacheHits;
  protected final Meter uowCacheMiss;
  protected final Meter sessionCacheHits;
  protected final Meter sessionCacheMiss;
  protected final Meter cacheHits;
  protected final Meter cacheMiss;
  protected final Timer requestLatency;

  Operation(AbstractSessionOperations sessionOperations) {
    this.sessionOps = sessionOperations;
    this.showValues = sessionOps.showValues();
    MetricRegistry metrics = sessionOperations.getMetricRegistry();
    if (metrics == null) {
      metrics = new MetricRegistry();
    }
    this.uowCacheHits = metrics.meter("net.helenus.UOW-cache-hits");
    this.uowCacheMiss = metrics.meter("net.helenus.UOW-cache-miss");
    this.sessionCacheHits = metrics.meter("net.helenus.session-cache-hits");
    this.sessionCacheMiss = metrics.meter("net.helenus.session-cache-miss");
    this.cacheHits = metrics.meter("net.helenus.cache-hits");
    this.cacheMiss = metrics.meter("net.helenus.cache-miss");
    this.requestLatency = metrics.timer("net.helenus.request-latency");
  }

  public ResultSet execute(AbstractSessionOperations session, UnitOfWork uow, TraceContext traceContext, long timeout,
      TimeUnit units, boolean showValues, boolean cached) { // throws TimeoutException {

    // Start recording in a Zipkin sub-span our execution time to perform this
    // operation.
    Tracer tracer = session.getZipkinTracer();
    Span span = null;
    if (tracer != null && traceContext != null) {
      span = tracer.newChild(traceContext);
  public static String queryString(BatchOperation operation, boolean includeValues) {
    return operation.toString(includeValues);
  }

    try {

      if (span != null) {
        span.name("cassandra");
        span.start();
  public static String queryString(Statement statement, boolean includeValues) {
    String query = null;
    if (statement instanceof BuiltStatement) {
      BuiltStatement builtStatement = (BuiltStatement) statement;
      if (includeValues) {
        RegularStatement regularStatement = builtStatement.setForceNoValues(true);
        query = regularStatement.getQueryString();
      } else {
        query = builtStatement.getQueryString();
      }
    } else if (statement instanceof RegularStatement) {
      RegularStatement regularStatement = (RegularStatement) statement;
      query = regularStatement.getQueryString();
    } else {
      query = statement.toString();
    }
    return query;
  }

  public ResultSet execute(
      AbstractSessionOperations session,
      UnitOfWork uow,
      long timeout,
      TimeUnit units,
      boolean showValues,
      boolean cached)
      throws TimeoutException {

    Statement statement = options(buildStatement(cached));
    Stopwatch timer = null;
    if (uow != null) {
      timer = uow.getExecutionTimer();
      timer.start();

    if (session.isShowCql()) {
      String stmt =
          (this instanceof BatchOperation)
              ? queryString((BatchOperation) this, showValues)
              : queryString(statement, showValues);
      session.getPrintStream().println(stmt);
    } else if (LOG.isDebugEnabled()) {
      String stmt =
          (this instanceof BatchOperation)
              ? queryString((BatchOperation) this, showValues)
              : queryString(statement, showValues);
      LOG.info("CQL> " + stmt);
    }
    ResultSetFuture futureResultSet = session.executeAsync(statement, showValues);
    ResultSet resultSet = futureResultSet.getUninterruptibly(); // TODO(gburd): (timeout, units);

    if (uow != null)
      timer.stop();

    Stopwatch timer = Stopwatch.createStarted();
    try {
      ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer);
      if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1);
      ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
      ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
      if (LOG.isDebugEnabled()) {
        ExecutionInfo ei = resultSet.getExecutionInfo();
        Host qh = ei.getQueriedHost();
        String oh =
            ei.getTriedHosts()
                .stream()
                .map(Host::getAddress)
                .map(InetAddress::toString)
                .collect(Collectors.joining(", "));
        ConsistencyLevel cl = ei.getAchievedConsistencyLevel();
        if (cl == null) {
          cl = statement.getConsistencyLevel();
        }
        int se = ei.getSpeculativeExecutions();
        String warn = ei.getWarnings().stream().collect(Collectors.joining(", "));
        String ri =
            String.format(
                "%s %s ~%s %s %s%s%sspec-retries: %d",
                "server v" + qh.getCassandraVersion(),
                qh.getAddress().toString(),
                (oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "",
                qh.getDatacenter(),
                qh.getRack(),
                (cl != null)
                    ? (" consistency: "
                        + cl.name()
                        + " "
                        + (cl.isDCLocal() ? " DC " : "")
                        + (cl.isSerial() ? " SC " : ""))
                    : "",
                (warn != null && !warn.equals("")) ? ": " + warn : "",
                se);
        if (uow != null) uow.setInfo(ri);
        else LOG.debug(ri);
      }
      if (!resultSet.wasApplied()
          && !(columnDefinitions.size() > 1 || !columnDefinitions.contains("[applied]"))) {
        throw new HelenusException("Operation Failed");
      }
      return resultSet;

    } finally {
      timer.stop();
      if (uow != null) uow.addDatabaseTime("Cassandra", timer);
      log(statement, uow, timer, showValues);
    }
  }

      if (span != null) {
        span.finish();
  void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
    if (LOG.isInfoEnabled()) {
      String uowString = "";
      if (uow != null) {
        uowString = "UOW(" + uow.hashCode() + ")";
      }
      String timerString = "";
      if (timer != null) {
        timerString = String.format(" %s ", timer.toString());
      }
      LOG.info(
          String.format(
              "%s%s%s", uowString, timerString, Operation.queryString(statement, showValues)));
    }
  }

  protected boolean isIdempotentOperation() {
    return false;
  }

  public Statement options(Statement statement) {

@@ -97,7 +198,7 @@ public abstract class Operation<E> {
  }

  public List<Facet> getFacets() {
    return null;
    return new ArrayList<Facet>();
  }

  public List<Facet> bindFacetValues() {

@@ -107,5 +208,4 @@ public abstract class Operation<E> {
  public boolean isSessionCacheable() {
    return false;
  }

}
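Note: the rewritten execute() replaces the Zipkin span plumbing with a Guava Stopwatch that is charged to the unit of work in a finally block, so timing is recorded on success, timeout, and failure alike. A minimal sketch of that pattern (Stopwatch is the real com.google.common.base.Stopwatch API; the surrounding names are simplified):

    // Minimal sketch of the timing pattern in the new execute().
    Stopwatch timer = Stopwatch.createStarted();
    try {
      return session.executeAsync(statement, uow, timer).getUninterruptibly(timeout, units);
    } finally {
      timer.stop(); // always runs, even on TimeoutException
      if (uow != null) uow.addDatabaseTime("Cassandra", timer);
      log(statement, uow, timer, showValues);
    }
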
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -43,5 +44,4 @@ public final class PreparedOperation<E> {
  public String toString() {
    return preparedStatement.getQueryString();
  }

}
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -23,7 +24,8 @@ public final class PreparedOptionalOperation<E> {
  private final PreparedStatement preparedStatement;
  private final AbstractOptionalOperation<E, ?> operation;

  public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
  public PreparedOptionalOperation(
      PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
    this.preparedStatement = statement;
    this.operation = operation;
  }
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -23,7 +24,8 @@ public final class PreparedStreamOperation<E> {
  private final PreparedStatement preparedStatement;
  private final AbstractStreamOperation<E, ?> operation;

  public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
  public PreparedStreamOperation(
      PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
    this.preparedStatement = statement;
    this.operation = operation;
  }
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,16 +16,15 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;

import net.helenus.core.cache.Facet;

public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
public final class SelectFirstOperation<E>
    extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {

  private final SelectOperation<E> delegate;

@@ -64,4 +64,9 @@ public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }

  @Override
  public boolean ignoreCache() {
    return delegate.ignoreCache();
  }
}
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,18 +16,15 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;

import net.helenus.core.cache.Facet;

public final class SelectFirstTransformingOperation<R, E>
    extends
    AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
    extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {

  private final SelectOperation<E> delegate;
  private final Function<E, R> fn;

@@ -59,4 +57,9 @@ public final class SelectFirstTransformingOperation<R, E>
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }

  @Override
  public boolean ignoreCache() {
    return delegate.ignoreCache();
  }
}
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,14 +16,6 @@
 */
package net.helenus.core.operation;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement;

@@ -31,19 +24,26 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.collect.Iterables;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.helenus.core.*;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.Fun;
import net.helenus.support.HelenusMappingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {

@@ -54,14 +54,17 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
  protected List<Ordering> ordering = null;
  protected Integer limit = null;
  protected boolean allowFiltering = false;

  protected String alternateTableName = null;
  protected boolean isCacheable = false;
  protected boolean implementsEntityType = false;

  @SuppressWarnings("unchecked")
  public SelectOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);

    this.rowMapper = new Function<Row, E>() {
    this.rowMapper =
        new Function<Row, E>() {

          @Override
          public E apply(Row source) {

@@ -84,31 +87,47 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {

    super(sessionOperations);

    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
    this.isCacheable = entity.isCacheable();
    this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
  }

  public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
  public SelectOperation(
      AbstractSessionOperations sessionOperations,
      HelenusEntity entity,
      Function<Row, E> rowMapper) {

    super(sessionOperations);
    this.rowMapper = rowMapper;

    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));

    isCacheable = entity.isCacheable();
    this.isCacheable = entity.isCacheable();
    this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
  }

  public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
  public SelectOperation(
      AbstractSessionOperations sessionOperations,
      Function<Row, E> rowMapper,
      HelenusPropertyNode... props) {

    super(sessionOperations);

    this.rowMapper = rowMapper;
    Collections.addAll(this.props, props);

    HelenusEntity entity = props[0].getEntity();
    this.isCacheable = entity.isCacheable();
    this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
  }

  public CountOperation count() {

@@ -119,8 +138,11 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
    if (entity == null) {
      entity = prop.getEntity();
    } else if (entity != prop.getEntity()) {
      throw new HelenusMappingException("you can count records only from a single entity "
          + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
      throw new HelenusMappingException(
          "you can count records only from a single entity "
              + entity.getMappingInterface()
              + " or "
              + prop.getEntity().getMappingInterface());
    }
  }

@@ -132,7 +154,10 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
    HelenusEntity entity = Helenus.entity(materializedViewClass);
    this.alternateTableName = entity.getName().toCql();
    this.props.clear();
    entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
    entity
        .getOrderedProperties()
        .stream()
        .map(p -> new HelenusPropertyNode(p, Optional.empty()))
        .forEach(p -> this.props.add(p));
    return this;
  }

@@ -150,7 +175,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {

    this.rowMapper = null;

    return new SelectTransformingOperation<R, E>(this, (r) -> {
    return new SelectTransformingOperation<R, E>(
        this,
        (r) -> {
          Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
          return (R) Helenus.map(entityClass, map);
        });

@@ -206,7 +233,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
      if (facet instanceof UnboundFacet) {
        UnboundFacet unboundFacet = (UnboundFacet) facet;
        UnboundFacet.Binder binder = unboundFacet.binder();
        unboundFacet.getProperties().forEach(prop -> {
        for (HelenusProperty prop : unboundFacet.getProperties()) {
          if (filters != null) {
            Filter filter = filters.get(prop);
            if (filter != null) {
              Object[] postulates = filter.postulateValues();

@@ -214,8 +242,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
                binder.setValueForProperty(prop, p.toString());
              }
            }

        });
          }
        }
        if (binder.isBound()) {
          boundFacets.add(binder.bind());
        }

@@ -236,30 +264,27 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
      String columnName = prop.getColumnName();
      selection = selection.column(columnName);

      if (prop.getProperty().caseSensitiveIndex()) {
        allowFiltering = true;
      }

      if (entity == null) {
        entity = prop.getEntity();
      } else if (entity != prop.getEntity()) {
        throw new HelenusMappingException("you can select columns only from a single entity "
            + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
        throw new HelenusMappingException(
            "you can select columns only from a single entity "
                + entity.getMappingInterface()
                + " or "
                + prop.getEntity().getMappingInterface());
      }

      if (cached) {
      if (cached && implementsEntityType) {
        switch (prop.getProperty().getColumnType()) {
          case PARTITION_KEY :
          case CLUSTERING_COLUMN :
          case PARTITION_KEY:
          case CLUSTERING_COLUMN:
            break;
          default :
          default:
            if (entity.equals(prop.getEntity())) {
              if (prop.getNext().isPresent()) {
                columnName = Iterables.getLast(prop).getColumnName().toCql(true);
              }
              if (!prop.getProperty().getDataType().isCollectionType()) {
                selection.writeTime(columnName).as(columnName + "_writeTime");
                selection.ttl(columnName).as(columnName + "_ttl");
                columnName = prop.getProperty().getColumnName().toCql(false);
                selection.ttl(columnName).as('"' + CacheUtil.ttlKey(columnName) + '"');
                selection.writeTime(columnName).as('"' + CacheUtil.writeTimeKey(columnName) + '"');
              }
            }
            break;

@@ -286,8 +311,35 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {

      Where where = select.where();

      boolean isFirstIndex = true;
      for (Filter<?> filter : filters.values()) {
        where.and(filter.getClause(sessionOps.getValuePreparer()));
        HelenusProperty filterProp = filter.getNode().getProperty();
        HelenusProperty prop =
            props
                .stream()
                .map(HelenusPropertyNode::getProperty)
                .filter(thisProp -> thisProp.getPropertyName().equals(filterProp.getPropertyName()))
                .findFirst()
                .orElse(null);
        if (allowFiltering == false && prop != null) {
          switch (prop.getColumnType()) {
            case PARTITION_KEY:
              break;
            case CLUSTERING_COLUMN:
            default:
              // When using non-Cassandra-standard 2i types or when using more than one
              // indexed column or non-indexed columns the query must include ALLOW FILTERING.
              if (prop.caseSensitiveIndex() == false) {
                allowFiltering = true;
              } else if (prop.getIndexName() != null) {
                allowFiltering |= !isFirstIndex;
                isFirstIndex = false;
              } else {
                allowFiltering = true;
              }
          }
        }
      }
    }

@@ -306,12 +358,14 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
  @Override
  public Stream<E> transform(ResultSet resultSet) {
    if (rowMapper != null) {
      return StreamSupport
          .stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
      return StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
          .map(rowMapper);
    } else {
      return (Stream<E>) StreamSupport
          .stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
      return (Stream<E>)
          StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
              false);
    }
  }
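Note: the @ -286 hunk above introduces the ALLOW FILTERING heuristic: a filter on a partition key never needs the clause, a single case-sensitive indexed column is fine, and a second indexed column, a case-insensitive index, or any non-indexed column forces it. A standalone sketch of the rule (ColumnInfo and its accessors are stand-in names, not library API):

    // Standalone sketch; ColumnInfo and its accessors are stand-ins.
    boolean needsAllowFiltering(List<ColumnInfo> whereColumns) {
      boolean firstIndex = true;
      for (ColumnInfo c : whereColumns) {
        if (c.isPartitionKey()) continue;            // directly addressable
        if (!c.hasCaseSensitiveIndex()) return true; // non-standard 2i (or no index)
        if (c.hasIndex()) {
          if (!firstIndex) return true;              // more than one indexed column
          firstIndex = false;
        } else {
          return true;                               // plain, non-indexed column
        }
      }
      return false;
    }
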
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,18 +16,15 @@
 */
package net.helenus.core.operation;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;

import net.helenus.core.cache.Facet;

public final class SelectTransformingOperation<R, E>
    extends
    AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
    extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {

  private final SelectOperation<E> delegate;
  private final Function<E, R> fn;

@@ -59,4 +57,14 @@ public final class SelectTransformingOperation<R, E>
  public Stream<R> transform(ResultSet resultSet) {
    return delegate.transform(resultSet).map(fn);
  }

  @Override
  public boolean isSessionCacheable() {
    return delegate.isSessionCacheable();
  }

  @Override
  public boolean ignoreCache() {
    return delegate.ignoreCache();
  }
}
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,53 +16,83 @@
 */
package net.helenus.core.operation;

import java.util.*;
import java.util.function.Function;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update;

import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.stream.Collectors;
import net.helenus.core.*;
import net.helenus.core.cache.BoundFacet;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
import net.helenus.support.Immutables;

public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateOperation<E>> {

  private HelenusEntity entity = null;

  private final List<Assignment> assignments = new ArrayList<Assignment>();
  private final Map<Assignment, BoundFacet> assignments = new HashMap<>();
  private final AbstractEntityDraft<E> draft;
  private final Map<String, Object> draftMap;

  private final Set<String> readSet;
  private HelenusEntity entity = null;
  private Object pojo;
  private int[] ttl;
  private long[] timestamp;
  private long writeTime = 0L;

  public UpdateOperation(AbstractSessionOperations sessionOperations) {
    super(sessionOperations);
    this.draft = null;
    this.draftMap = null;
    this.readSet = null;
  }

  public UpdateOperation(AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
  public UpdateOperation(
      AbstractSessionOperations sessionOperations, AbstractEntityDraft<E> draft) {
    super(sessionOperations);
    this.draft = draft;
    this.draftMap = draft.toMap();
    this.readSet = draft.read();
  }

  public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
  public UpdateOperation(AbstractSessionOperations sessionOperations, Object pojo) {
    super(sessionOperations);
    this.draft = null;
    this.draftMap = null;

    if (pojo != null) {
      this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
      if (this.entity != null && entity.isCacheable() && pojo instanceof MapExportable) {
        this.pojo = pojo;
        this.readSet = ((MapExportable) pojo).toReadSet();
      } else {
        this.readSet = null;
      }
    } else {
      this.readSet = null;
    }
  }

  public UpdateOperation(
      AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
    super(sessionOperations);
    this.draft = null;
    this.draftMap = null;
    this.readSet = null;

    Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
    assignments.add(QueryBuilder.set(p.getColumnName(), value));
    assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(p.getProperty(), v));

    addPropertyNode(p);
  }
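Note: the field change above (List<Assignment> became Map<Assignment, BoundFacet>) pairs every CQL assignment with the cache facet it dirties; the hunks below thread that pairing through set(), increment/decrement, and the list/set mutators. A minimal sketch of the idea, using the types already in this file:

    // Minimal sketch: each assignment carries the facet used to patch the
    // cache after the UPDATE applies; a null facet simply skips the patch.
    Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
    assignments.put(
        QueryBuilder.set(p.getColumnName(), value),
        new BoundFacet(p.getProperty(), v));
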
@ -70,9 +101,24 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
Objects.requireNonNull(getter, "getter is empty");
|
||||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
|
||||
HelenusProperty prop = p.getProperty();
|
||||
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
|
||||
assignments.add(QueryBuilder.set(p.getColumnName(), value));
|
||||
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, prop);
|
||||
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(prop, value));
|
||||
|
||||
if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
if (draft.get(key, value.getClass()) != v) {
|
||||
draft.set(key, v);
|
||||
}
|
||||
}
|
||||
|
||||
if (pojo != null) {
|
||||
if (!BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop).equals(v)) {
|
||||
String key = prop.getPropertyName();
|
||||
((MapExportable) pojo).put(key, v);
|
||||
}
|
||||
}
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
|
@ -97,15 +143,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
|
||||
BoundFacet facet = null;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value + delta);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) + delta);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -119,15 +171,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
|
||||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
|
||||
|
||||
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
|
||||
BoundFacet facet = null;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
|
||||
facet = new BoundFacet(prop, value - delta);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
facet = new BoundFacet(prop, draftMap.get(key));
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
draftMap.put(key, (Long) draftMap.get(key) - delta);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -146,16 +204,27 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(0, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -167,16 +236,27 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
List valueObj = prepareListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null && value.size() > 0) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.addAll(0, value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -188,13 +268,16 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null || draft != null) {
|
||||
final List<V> list;
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
} else {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
}
|
||||
if (idx < 0) {
|
||||
list.add(0, value);
|
||||
} else if (idx > list.size()) {
|
||||
|
@ -203,8 +286,15 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
list.add(idx, value);
|
||||
}
|
||||
list.add(0, value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
facet = null;
|
||||
}
|
||||
|
||||
assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -216,16 +306,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
|
|||
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
|
||||
Object valueObj = prepareSingleListValue(p, value);
|
||||
|
||||
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
|
||||
final List<V> list;
|
||||
final BoundFacet facet;
|
||||
HelenusProperty prop = p.getProperty();
|
||||
if (pojo != null) {
|
||||
list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else if (draft != null) {
|
||||
String key = prop.getPropertyName();
|
||||
list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
facet = new BoundFacet(prop, list);
|
||||
} else {
|
||||
list = null;
|
||||
facet = null;
|
||||
}
|
||||
assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet);
|
||||
|
||||
addPropertyNode(p);
|
||||
|
||||
if (draft != null) {
|
||||
String key = p.getProperty().getPropertyName();
|
||||
List<V> list = (List<V>) draftMap.get(key);
|
||||
list.add(value);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
@@ -237,16 +337,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
    List valueObj = prepareListValue(p, value);

    assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
    final List<V> list;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      list.addAll(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null && value.size() > 0) {
      String key = prop.getPropertyName();
      list = (List<V>) draftMap.get(key);
      list.addAll(value);
      facet = new BoundFacet(prop, list);
    } else {
      list = null;
      facet = null;
    }
    assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null && value.size() > 0) {
      String key = p.getProperty().getPropertyName();
      List<V> list = (List<V>) draftMap.get(key);
      list.addAll(value);
    }

    return this;
  }

@@ -258,16 +368,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
    Object valueObj = prepareSingleListValue(p, value);

    assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
    final List<V> list;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      list.remove(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      list = (List<V>) draftMap.get(key);
      list.remove(value);
      facet = new BoundFacet(prop, list);
    } else {
      list = null;
      facet = null;
    }
    assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      List<V> list = (List<V>) draftMap.get(key);
      list.remove(value);
    }

    return this;
  }

@@ -279,16 +399,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
    List valueObj = prepareListValue(p, value);

    assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
    final List<V> list;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      list.removeAll(value);
      facet = new BoundFacet(prop, list);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      list = (List<V>) draftMap.get(key);
      list.removeAll(value);
      facet = new BoundFacet(prop, list);
    } else {
      list = null;
      facet = null;
    }
    assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      List<V> list = (List<V>) draftMap.get(key);
      list.removeAll(value);
    }

    return this;
  }

@@ -297,7 +427,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    Object valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      List convertedList = (List) converter.get().apply(Immutables.listOf(value));
      valueObj = convertedList.get(0);

@@ -312,7 +443,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    List valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (List) converter.get().apply(value);
    }

@@ -336,16 +468,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);

    assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));
    final Set<V> set;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      set.add(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      set = (Set<V>) draftMap.get(key);
      set.add(value);
      facet = new BoundFacet(prop, set);
    } else {
      set = null;
      facet = null;
    }
    assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.add(value);
    }

    return this;
  }

@@ -357,16 +499,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);

    assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));
    final Set<V> set;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      set.addAll(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      set = (Set<V>) draftMap.get(key);
      set.addAll(value);
      facet = new BoundFacet(prop, set);
    } else {
      set = null;
      facet = null;
    }
    assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.addAll(value);
    }

    return this;
  }

@@ -378,16 +530,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Object valueObj = prepareSingleSetValue(p, value);

    assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));
    final Set<V> set;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      set.remove(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      set = (Set<V>) draftMap.get(key);
      set.remove(value);
      facet = new BoundFacet(prop, set);
    } else {
      set = null;
      facet = null;
    }
    assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.remove(value);
    }

    return this;
  }

@@ -399,16 +561,26 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
    Set valueObj = prepareSetValue(p, value);

    assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));
    final Set<V> set;
    final BoundFacet facet;
    HelenusProperty prop = p.getProperty();
    if (pojo != null) {
      set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      set.removeAll(value);
      facet = new BoundFacet(prop, set);
    } else if (draft != null) {
      String key = prop.getPropertyName();
      set = (Set<V>) draftMap.get(key);
      set.removeAll(value);
      facet = new BoundFacet(prop, set);
    } else {
      set = null;
      facet = null;
    }
    assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet);

    addPropertyNode(p);

    if (draft != null) {
      String key = p.getProperty().getPropertyName();
      Set<V> set = (Set<V>) draftMap.get(key);
      set.removeAll(value);
    }

    return this;
  }

@@ -417,7 +589,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusProperty prop = p.getProperty();
    Object valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
      valueObj = convertedSet.iterator().next();

@@ -431,7 +604,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusProperty prop = p.getProperty();
    Set valueObj = value;

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      valueObj = (Set) converter.get().apply(value);
    }

@@ -455,23 +629,35 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    final Map<K, V> map;
    final BoundFacet facet;
    if (pojo != null) {
      map = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      map.put(key, value);
      facet = new BoundFacet(prop, map);
    } else if (draft != null) {
      map = (Map<K, V>) draftMap.get(prop.getPropertyName());
      map.put(key, value);
      facet = new BoundFacet(prop, map);
    } else {
      map = null;
      facet = null;
    }

    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
          .apply(Immutables.mapOf(key, value));
      Map<Object, Object> convertedMap =
          (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
      for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
        assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
        assignments.put(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()), facet);
      }
    } else {
      assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
      assignments.put(QueryBuilder.put(p.getColumnName(), key, value), facet);
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value);
    }

    return this;
  }

@@ -483,20 +669,32 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
    HelenusProperty prop = p.getProperty();

    Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
    final Map<K, V> newMap;
    final BoundFacet facet;
    if (pojo != null) {
      newMap = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
      newMap.putAll(map);
      facet = new BoundFacet(prop, newMap);
    } else if (draft != null) {
      newMap = (Map<K, V>) draftMap.get(prop.getPropertyName());
      newMap.putAll(map);
      facet = new BoundFacet(prop, newMap);
    } else {
      newMap = null;
      facet = null;
    }

    Optional<Function<Object, Object>> converter =
        prop.getWriteConverter(sessionOps.getSessionRepository());
    if (converter.isPresent()) {
      Map convertedMap = (Map) converter.get().apply(map);
      assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
      assignments.put(QueryBuilder.putAll(p.getColumnName(), convertedMap), facet);
    } else {
      assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
      assignments.put(QueryBuilder.putAll(p.getColumnName(), map), facet);
    }

    addPropertyNode(p);

    if (draft != null) {
      ((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map);
    }

    return this;
  }

@@ -509,7 +707,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    Update update = QueryBuilder.update(entity.getName().toCql());

    for (Assignment assignment : assignments) {
    for (Assignment assignment : assignments.keySet()) {
      update.with(assignment);
    }

@@ -540,6 +738,10 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
  @Override
  public E transform(ResultSet resultSet) {
    if ((ifFilters != null && !ifFilters.isEmpty()) && (resultSet.wasApplied() == false)) {
      throw new HelenusException("Statement was not applied due to consistency constraints");
    }

    if (draft != null) {
      return Helenus.map(draft.getEntityClass(), draft.toMap(draftMap));
    } else {

@@ -563,22 +765,140 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
    if (entity == null) {
      entity = p.getEntity();
    } else if (entity != p.getEntity()) {
      throw new HelenusMappingException("you can update columns only in single entity "
          + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
      throw new HelenusMappingException(
          "you can update columns only in single entity "
              + entity.getMappingInterface()
              + " or "
              + p.getEntity().getMappingInterface());
    }
  }

  private void adjustTtlAndWriteTime(MapExportable pojo) {
    if (ttl != null || writeTime != 0L) {
      List<String> names = new ArrayList<String>(assignments.size());
      for (BoundFacet facet : assignments.values()) {
        for (HelenusProperty prop : facet.getProperties()) {
          names.add(prop.getColumnName().toCql(false));
        }
      }

      if (names.size() > 0) {
        if (ttl != null) {
          names.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
        }
        if (writeTime != 0L) {
          names.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
        }
      }
    }
  }

  @Override
  public E sync(UnitOfWork uow) {// throws TimeoutException {
  protected boolean isIdempotentOperation() {
    return assignments
            .values()
            .stream()
            .allMatch(
                facet -> {
                  if (facet != null) {
                    Set<HelenusProperty> props = facet.getProperties();
                    if (props != null && props.size() > 0) {
                      return props.stream().allMatch(prop -> prop.isIdempotent());
                    } else {
                      return true;
                    }
                  } else {
                    // In this case our UPDATE statement made mutations via the List, Set, Map methods only.
                    return false;
                  }
                })
        || super.isIdempotentOperation();
  }
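The update is only reported as idempotent when every written property is marked idempotent; facet-less mutations (the raw List/Set/Map paths above) are treated as non-idempotent, because replaying them on a driver retry can apply the change twice. A small, self-contained illustration of the distinction, using plain Java collections rather than the library's types:

    import java.util.ArrayList;
    import java.util.List;

    public class IdempotencyDemo {
      public static void main(String[] args) {
        // A retried "append" lands twice -- not idempotent:
        List<String> tags = new ArrayList<>();
        tags.add("urgent");
        tags.add("urgent");           // a retry replays the same mutation
        System.out.println(tags);     // [urgent, urgent]

        // A retried full-column "set" converges to one state -- idempotent:
        List<String> replaced = List.of("urgent");
        replaced = List.of("urgent"); // replaying it changes nothing
        System.out.println(replaced); // [urgent]
      }
    }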
  @Override
  public E sync() throws TimeoutException {
    E result = super.sync();
    if (result != null && entity.isCacheable()) {
      if (draft != null) {
        adjustTtlAndWriteTime(draft);
        adjustTtlAndWriteTime((MapExportable) result);
        sessionOps.updateCache(result, bindFacetValues());
      } else if (pojo != null) {
        adjustTtlAndWriteTime((MapExportable) pojo);
        sessionOps.updateCache(pojo, bindFacetValues());
      } else {
        sessionOps.cacheEvict(bindFacetValues());
      }
    }
    return result;
  }

  @Override
  public E sync(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      return sync();
    }
    E result = super.sync(uow);
    // TODO(gburd): Only drafted entity objects are updated in the cache at this time.
    if (result != null) {
      if (draft != null) {
        updateCache(uow, result, getFacets());
        adjustTtlAndWriteTime(draft);
      }
      if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
        adjustTtlAndWriteTime((MapExportable) result);
        cacheUpdate(uow, result, bindFacetValues());
      } else if (pojo != null) {
        adjustTtlAndWriteTime((MapExportable) pojo);
        cacheUpdate(uow, (E) pojo, bindFacetValues());
        return (E) pojo;
      }
    }
    return result;
  }

  public E batch(UnitOfWork uow) throws TimeoutException {
    if (uow == null) {
      throw new HelenusException("UnitOfWork cannot be null when batching operations.");
    }

    final E result;
    if (draft != null) {
      result = draft.build();
      adjustTtlAndWriteTime(draft);
    } else if (pojo != null) {
      result = (E) pojo;
      adjustTtlAndWriteTime((MapExportable) pojo);
    } else {
      result = null;
    }

    if (result != null) {
      cacheUpdate(uow, result, bindFacetValues());
      uow.batch(this);
      return result;
    }

    return sync(uow);
  }
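batch(UnitOfWork) follows a deferred-write pattern: the caller immediately gets the locally built result (the draft or pojo), the cache is updated optimistically, and the statement itself is queued on the unit of work for later execution. A minimal sketch of that shape with illustrative types only, not the library's API:

    import java.util.ArrayList;
    import java.util.List;

    final class UnitOfWorkSketch {
      private final List<Runnable> pending = new ArrayList<>();

      <T> T batch(T locallyBuiltResult, Runnable statement) {
        pending.add(statement);       // defer the write
        return locallyBuiltResult;    // caller proceeds with the optimistic value
      }

      void commit() {
        pending.forEach(Runnable::run); // flush all deferred statements together
        pending.clear();
      }
    }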
  @Override
  public List<Facet> bindFacetValues() {
    List<Facet> facets = bindFacetValues(entity.getFacets());
    facets.addAll(
        assignments
            .values()
            .stream()
            .distinct()
            .filter(o -> o != null)
            .collect(Collectors.toList()));
    return facets;
  }

  @Override
  public List<Facet> getFacets() {
    if (entity != null) {
      return entity.getFacets();
    } else {
      return new ArrayList<Facet>();
    }
  }
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -19,13 +20,17 @@ import java.util.HashMap;
import java.util.Map;

public enum DefaultPrimitiveTypes {
  BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class,
      (short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);
  BOOLEAN(boolean.class, false),
  BYTE(byte.class, (byte) 0x0),
  CHAR(char.class, (char) 0x0),
  SHORT(short.class, (short) 0),
  INT(int.class, 0),
  LONG(long.class, 0L),
  FLOAT(float.class, 0.0f),
  DOUBLE(double.class, 0.0);

  private final Class<?> primitiveClass;
  private final Object defaultValue;

  private static final Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();
  private static final Map<Class<?>, DefaultPrimitiveTypes> map =
      new HashMap<Class<?>, DefaultPrimitiveTypes>();

  static {
    for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {

@@ -33,6 +38,9 @@ public enum DefaultPrimitiveTypes {
    }
  }

  private final Class<?> primitiveClass;
  private final Object defaultValue;

  private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
    this.primitiveClass = primitiveClass;
    this.defaultValue = defaultValue;
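This enum backs a class-to-default lookup table so proxied getters can return a sane value instead of null for primitive return types. A small runnable sketch of the idea (the lookup() accessor named here is an assumption inferred from the static map above):

    public class DefaultsDemo {
      public static void main(String[] args) {
        Class<?> returnType = int.class;
        Object value = null; // nothing stored for this column
        if (value == null && returnType.isPrimitive()) {
          // Returning null would NPE at the caller's unboxing site, so hand
          // back the JVM default -- what lookup(returnType).getDefaultValue()
          // would produce in the real enum.
          value = 0;
        }
        System.out.println(value); // 0
      }
    }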
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -22,4 +23,6 @@ public interface Drafted<T> extends MapExportable {
  Set<String> mutated();

  T build();

  Set<String> read();
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,7 +17,6 @@
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;

import net.helenus.mapping.HelenusEntity;

public interface DslExportable {

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,15 +16,14 @@
 */
package net.helenus.core.reflect;

import com.datastax.driver.core.*;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import com.datastax.driver.core.*;

import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;

@@ -36,20 +36,19 @@ import net.helenus.support.HelenusException;

public class DslInvocationHandler<E> implements InvocationHandler {

  private final Class<E> iface;
  private final ClassLoader classLoader;
  private final Optional<HelenusPropertyNode> parent;
  private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
  private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
  private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
  private HelenusEntity entity = null;
  private Metadata metadata = null;

  private final Class<E> iface;
  private final ClassLoader classLoader;

  private final Optional<HelenusPropertyNode> parent;

  private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();

  private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
  private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();

  public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
  public DslInvocationHandler(
      Class<E> iface,
      ClassLoader classLoader,
      Optional<HelenusPropertyNode> parent,
      Metadata metadata) {

    this.metadata = metadata;

@@ -67,8 +66,9 @@ public class DslInvocationHandler<E> implements InvocationHandler {

  private HelenusEntity init(Metadata metadata) {
    HelenusEntity entity = new HelenusMappingEntity(iface, metadata);

    for (HelenusProperty prop : entity.getOrderedProperties()) {
    Collection<HelenusProperty> properties = entity.getOrderedProperties();
    if (properties != null) {
      for (HelenusProperty prop : properties) {

        map.put(prop.getGetterMethod(), prop);

@@ -77,7 +77,11 @@ public class DslInvocationHandler<E> implements InvocationHandler {

        if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {

          Object childDsl = Helenus.dsl(javaType, classLoader, Optional.of(new HelenusPropertyNode(prop, parent)),
          Object childDsl =
              Helenus.dsl(
                  javaType,
                  classLoader,
                  Optional.of(new HelenusPropertyNode(prop, parent)),
                  metadata);

          udtMap.put(prop.getGetterMethod(), childDsl);

@@ -86,15 +90,21 @@ public class DslInvocationHandler<E> implements InvocationHandler {
        if (type instanceof DTDataType) {
          DTDataType dataType = (DTDataType) type;

          if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
          if (dataType.getDataType() instanceof TupleType
              && !TupleValue.class.isAssignableFrom(javaType)) {

            Object childDsl = Helenus.dsl(javaType, classLoader,
                Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
            Object childDsl =
                Helenus.dsl(
                    javaType,
                    classLoader,
                    Optional.of(new HelenusPropertyNode(prop, parent)),
                    metadata);

            tupleMap.put(prop.getGetterMethod(), childDsl);
          }
        }
      }
    }

    return entity;
  }

@@ -116,7 +126,9 @@ public class DslInvocationHandler<E> implements InvocationHandler {
      return false;
    }

    if (DslExportable.SET_METADATA_METHOD.equals(methodName) && args.length == 1 && args[0] instanceof Metadata) {
    if (DslExportable.SET_METADATA_METHOD.equals(methodName)
        && args.length == 1
        && args[0] instanceof Metadata) {
      if (metadata == null) {
        this.setCassandraMetadataForHelenusSession((Metadata) args[0]);
      }

@@ -170,7 +182,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
      DataType dt = dataType.getDataType();

      switch (dt.getName()) {
        case TUPLE :
        case TUPLE:
          Object childDsl = tupleMap.get(method);

          if (childDsl != null) {

@@ -179,16 +191,16 @@ public class DslInvocationHandler<E> implements InvocationHandler {

          break;

        case SET :
        case SET:
          return new SetDsl(new HelenusPropertyNode(prop, parent));

        case LIST :
        case LIST:
          return new ListDsl(new HelenusPropertyNode(prop, parent));

        case MAP :
        case MAP:
          return new MapDsl(new HelenusPropertyNode(prop, parent));

        default :
        default:
          break;
      }
    }

src/main/java/net/helenus/core/reflect/Entity.java (new file, 73 lines)
@@ -0,0 +1,73 @@
/*
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.helenus.core.reflect;

import net.helenus.core.Getter;

public interface Entity {
  String WRITTEN_AT_METHOD = "writtenAt";
  String TTL_OF_METHOD = "ttlOf";
  String TOKEN_OF_METHOD = "tokenOf";

  /**
   * The write time for the property in question referenced by the getter.
   *
   * @param getter the property getter
   * @return the timestamp associated with the property identified by the getter
   */
  default Long writtenAt(Getter getter) {
    return 0L;
  }

  /**
   * The write time for the property in question referenced by the property name.
   *
   * @param prop the name of a property in this entity
   * @return the timestamp associated with the property identified by the property name if it exists
   */
  default Long writtenAt(String prop) {
    return 0L;
  };

  /**
   * The time-to-live for the property in question referenced by the getter.
   *
   * @param getter the property getter
   * @return the time-to-live in seconds associated with the property identified by the getter
   */
  default Integer ttlOf(Getter getter) {
    return 0;
  };

  /**
   * The time-to-live for the property in question referenced by the property name.
   *
   * @param prop the name of a property in this entity
   * @return the time-to-live in seconds associated with the property identified by the property name if it exists
   */
  default Integer ttlOf(String prop) {
    return 0;
  };

  /**
   * The token (partition identifier) for this entity which can change over time if
   * the cluster grows or shrinks but should be stable otherwise.
   *
   * @return the token for the entity
   */
  default Long tokenOf() { return 0L; }
}
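For orientation, a hypothetical usage of these new metadata accessors; the Widget interface, its name() getter, and the session call are all made up for illustration and are not part of this change:

    // @Table
    // public interface Widget extends Entity, Drafted<Widget> {
    //   String name();
    // }
    //
    // Widget w = /* entity instance loaded through a Helenus session */ null;
    // Long ts     = w.writtenAt("name"); // writetime recorded for the column
    // Integer ttl = w.ttlOf("name");     // remaining TTL in seconds, 0 when none
    // Long token  = w.tokenOf();         // partition token, stable unless the ring changes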
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -19,9 +20,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Optional;
import java.util.function.Function;

import javax.validation.ConstraintValidator;

import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;

@@ -65,6 +64,11 @@ public final class HelenusNamedProperty implements HelenusProperty {
    return false;
  }

  @Override
  public boolean isIdempotent() {
    return false;
  }

  @Override
  public Class<?> getJavaType() {
    throw new HelenusMappingException("will never called");

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -17,7 +18,6 @@ package net.helenus.core.reflect;

import java.util.*;
import java.util.stream.Collectors;

import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,7 +17,6 @@
package net.helenus.core.reflect;

import java.util.*;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -19,7 +20,6 @@ import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,10 +17,25 @@
package net.helenus.core.reflect;

import java.util.Map;
import java.util.Set;
import net.helenus.core.Getter;

public interface MapExportable {

  public static final String TO_MAP_METHOD = "toMap";
  String TO_MAP_METHOD = "toMap";
  String TO_READ_SET_METHOD = "toReadSet";
  String PUT_METHOD = "put";

  Map<String, Object> toMap();

  default Map<String, Object> toMap(boolean mutable) {
    return null;
  }

  default Set<String> toReadSet() {
    return null;
  }

  default void put(String key, Object value) {}

  default <T> void put(Getter<T> getter, T value) {}
}
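A sketch of what the widened MapExportable contract lets a caller do; the entity variable is hypothetical, but the methods are exactly those declared above:

    // MapExportable w = ...;                    // any mapped entity proxy
    // Map<String, Object> frozen = w.toMap();   // unmodifiable view of the backing map
    // Map<String, Object> copy = w.toMap(true); // mutable copy (immutable collections
    //                                           // swapped for mutable ones)
    // w.put("name", "gizmo");                   // write-through by column name
    // w.put(widget::name, "gizmo");             // or type-safe, via a getter reference
    // Set<String> touched = w.toReadSet();      // which properties have been read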
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,23 +16,32 @@
 */
package net.helenus.core.reflect;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.Map;

import java.util.*;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.cache.CacheUtil;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException;

public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
  private static final long serialVersionUID = -7044209982830584984L;

  private final Map<String, Object> src;
  private Map<String, Object> src;
  private final Set<String> read = new HashSet<String>();
  private final Class<E> iface;

  public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {

@@ -47,18 +57,30 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
    // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/

    // First, we need an instance of a private inner-class found in MethodHandles.
    Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
        int.class);
    Constructor<MethodHandles.Lookup> constructor =
        MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
    constructor.setAccessible(true);

    // Now we need to lookup and invoke special the default method on the interface class.
    final Class<?> declaringClass = method.getDeclaringClass();
    Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
        .unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
    Object result =
        constructor
            .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
            .unreflectSpecial(method, declaringClass)
            .bindTo(proxy)
            .invokeWithArguments(args);
    return result;
  }
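That reflective Lookup-constructor hack is the standard Java 8 way to dispatch a default method from inside a proxy InvocationHandler; on Java 9+ the supported route is MethodHandles.privateLookupIn. A self-contained version of the trick, reduced to its essentials:

    import java.lang.invoke.MethodHandles;
    import java.lang.reflect.Constructor;
    import java.lang.reflect.Proxy;

    public class DefaultMethodDemo {
      interface Greeter {
        default String greet() { return "hi"; }
      }

      public static void main(String[] args) {
        Greeter proxy =
            (Greeter)
                Proxy.newProxyInstance(
                    Greeter.class.getClassLoader(),
                    new Class<?>[] {Greeter.class},
                    (p, method, a) -> {
                      Constructor<MethodHandles.Lookup> ctor =
                          MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
                      ctor.setAccessible(true); // needs --add-opens java.base on JDK 16+
                      return ctor
                          .newInstance(Greeter.class, MethodHandles.Lookup.PRIVATE)
                          .unreflectSpecial(method, Greeter.class) // bypass the proxy's override
                          .bindTo(p)
                          .invokeWithArguments(a == null ? new Object[0] : a);
                    });
        System.out.println(proxy.greet()); // prints "hi"
      }
    }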
  private Object writeReplace() {
    return new SerializationProxy<E>(this);
  }

  private void readObject(ObjectInputStream stream) throws InvalidObjectException {
    throw new InvalidObjectException("Proxy required.");
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {

@@ -79,12 +101,96 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
          return true;
        }
      }
      if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) {
        return true;
      if (otherObj instanceof MapExportable) {
        return MappingUtil.compareMaps((MapExportable) otherObj, src);
      }
      return false;
    }

    if (MapExportable.PUT_METHOD.equals(methodName) && method.getParameterCount() == 2) {
      final String key;
      if (args[0] instanceof String) {
        key = (String) args[0];
      } else if (args[0] instanceof Getter) {
        key = MappingUtil.resolveMappingProperty((Getter) args[0]).getProperty().getPropertyName();
      } else {
        key = null;
      }
      if (key != null) {
        final Object value = (Object) args[1];
        if (src instanceof ValueProviderMap) {
          this.src = fromValueProviderMap(src);
        }
        src.put(key, value);
      }
      return null;
    }

    if (Entity.WRITTEN_AT_METHOD.equals(methodName) && method.getParameterCount() == 1) {
      final String key;
      if (args[0] instanceof String) {
        key = CacheUtil.writeTimeKey((String) args[0]);
      } else if (args[0] instanceof Getter) {
        Getter getter = (Getter) args[0];
        key =
            CacheUtil.writeTimeKey(
                MappingUtil.resolveMappingProperty(getter)
                    .getProperty()
                    .getColumnName()
                    .toCql(false));
      } else {
        return 0L;
      }
      Long v = (Long) src.get(key);
      if (v != null) {
        return v;
      }
      return 0L;
    }

    if (Entity.TOKEN_OF_METHOD.equals(methodName) && method.getParameterCount() == 0) {
      Long v = (Long) src.get("");
      if (v != null) {
        return v;
      }
      return 0L;
    }

    if (Entity.TTL_OF_METHOD.equals(methodName) && method.getParameterCount() == 1) {
      final String key;
      if (args[0] instanceof String) {
        key = CacheUtil.ttlKey((String) args[0]);
      } else if (args[0] instanceof Getter) {
        Getter getter = (Getter) args[0];
        key =
            CacheUtil.ttlKey(
                MappingUtil.resolveMappingProperty(getter)
                    .getProperty()
                    .getColumnName()
                    .toCql(false));
      } else {
        return 0;
      }
      int v[] = (int[]) src.get(key);
      if (v != null) {
        return v[0];
      }
      return 0;
    }

    if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
      if (method.getParameterCount() == 1 && args[0] instanceof Boolean) {
        if ((boolean) args[0] == true) {
          return fromValueProviderMap(src, true);
        }
      }
      return Collections.unmodifiableMap(src);
    }

    if (MapExportable.TO_READ_SET_METHOD.equals(methodName)) {
      return read;
    }

    if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
      throw new HelenusException("invalid getter method " + method);
    }

@@ -97,30 +203,33 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
      return iface.getSimpleName() + ": " + src.toString();
    }

    if ("writeReplace".equals(methodName)) {
      return new SerializationProxy(this);
    }

    if ("readObject".equals(methodName)) {
      throw new InvalidObjectException("Proxy required.");
    }

    if ("dsl".equals(methodName)) {
      return Helenus.dsl(iface);
    }

    if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
      return Collections.unmodifiableMap(src);
    }

    Object value = src.get(methodName);

    Class<?> returnType = method.getReturnType();
    final Object value = src.get(methodName);
    read.add(methodName);

    if (value == null) {

      Class<?> returnType = method.getReturnType();

      // Default implementations of non-Transient methods in entities are the default
      // value when the
      // map contains 'null'.
      // value when the map contains 'null'.
      if (method.isDefault()) {
        return invokeDefault(proxy, method, args);
      }

      // Otherwise, if the return type of the method is a primitive Java type then
      // we'll return the standard
      // default values to avoid a NPE in user code.
      // we'll return the standard default values to avoid a NPE in user code.
      if (returnType.isPrimitive()) {
        DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
        if (type == null) {

@@ -132,4 +241,54 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab

    return value;
  }

  static Map<String, Object> fromValueProviderMap(Map v) {
    return fromValueProviderMap(v, false);
  }

  static Map<String, Object> fromValueProviderMap(Map v, boolean mutable) {
    if (v instanceof ValueProviderMap) {
      Map<String, Object> m = new HashMap<String, Object>(v.size());
      Set<String> keys = v.keySet();
      for (String key : keys) {
        Object value = v.get(key);
        if (value != null && mutable) {
          if (ImmutableList.class.isAssignableFrom(value.getClass())) {
            m.put(key, new ArrayList((List) value));
          } else if (ImmutableMap.class.isAssignableFrom(value.getClass())) {
            m.put(key, new HashMap((Map) value));
          } else if (ImmutableSet.class.isAssignableFrom(value.getClass())) {
            m.put(key, new HashSet((Set) value));
          } else {
            m.put(key, value);
          }
        } else {
          m.put(key, value);
        }
      }
      return m;
    }
    return v;
  }

  static class SerializationProxy<E> implements Serializable {

    private static final long serialVersionUID = -5617583940055969353L;

    private final Class<E> iface;
    private final Map<String, Object> src;

    public SerializationProxy(MapperInvocationHandler mapper) {
      this.iface = mapper.iface;
      if (mapper.src instanceof ValueProviderMap) {
        this.src = fromValueProviderMap(mapper.src);
      } else {
        this.src = mapper.src;
      }
    }

    Object readResolve() throws ObjectStreamException {
      return new MapperInvocationHandler(iface, src);
    }
  }
}
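The handler serializes through the classic serialization-proxy pattern: writeReplace swaps the proxy in on write, readResolve rebuilds a valid handler on read, and readObject on the outer class rejects forged byte streams. The shape of the pattern, reduced to its core with illustrative class names:

    import java.io.InvalidObjectException;
    import java.io.ObjectInputStream;
    import java.io.Serializable;

    class Handler implements Serializable {
      transient Object liveState = new Object(); // not directly serializable

      private Object writeReplace() {
        return new Proxy0();                     // serialize the proxy, not the handler
      }

      private void readObject(ObjectInputStream in) throws InvalidObjectException {
        throw new InvalidObjectException("Proxy required."); // block direct deserialization
      }

      private static class Proxy0 implements Serializable {
        Object readResolve() {
          return new Handler();                  // rebuild a valid instance on read
        }
      }
    }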
@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,11 +16,9 @@
 */
package net.helenus.core.reflect;

import com.datastax.driver.core.Metadata;
import java.lang.reflect.Proxy;
import java.util.Optional;

import com.datastax.driver.core.Metadata;

import net.helenus.core.DslInstantiator;

public enum ReflectionDslInstantiator implements DslInstantiator {

@@ -27,10 +26,15 @@ public enum ReflectionDslInstantiator implements DslInstantiator {

  @Override
  @SuppressWarnings("unchecked")
  public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
  public <E> E instantiate(
      Class<E> iface,
      ClassLoader classLoader,
      Optional<HelenusPropertyNode> parent,
      Metadata metadata) {
    DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
    DslInvocationHandler<E> handler =
        new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
    E proxy =
        (E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
    return proxy;
  }
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -19,8 +20,7 @@ import net.helenus.support.HelenusMappingException;

public final class ReflectionInstantiator {

  private ReflectionInstantiator() {
  }
  private ReflectionInstantiator() {}

  public static <T> T instantiateClass(Class<T> clazz) {

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,9 +16,9 @@
 */
package net.helenus.core.reflect;

import java.io.Serializable;
import java.lang.reflect.Proxy;
import java.util.Map;

import net.helenus.core.MapperInstantiator;

public enum ReflectionMapperInstantiator implements MapperInstantiator {

@@ -28,7 +29,10 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
  public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {

    MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
    E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
    E proxy =
        (E)
            Proxy.newProxyInstance(
                classLoader, new Class[] {iface, MapExportable.class, Serializable.class}, handler);
    return proxy;
  }
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -18,7 +19,6 @@ package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

import net.helenus.support.HelenusMappingException;

public final class SetDsl<V> implements Set<V> {

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,7 +17,6 @@
package net.helenus.mapping;

import java.lang.reflect.Method;

import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;

@@ -103,13 +103,21 @@ public final class ColumnInformation {
  private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {

    if (columnTypeLocal != ColumnType.COLUMN) {
      throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
      throw new HelenusMappingException(
          "property can be annotated only by a single column type " + getter);
    }
  }

  @Override
  public String toString() {
    return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
        + ", ordering=" + ordering + "]";
    return "ColumnInformation [columnName="
        + columnName
        + ", columnType="
        + columnType
        + ", ordinal="
        + ordinal
        + ", ordering="
        + ordering
        + "]";
  }
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,5 +17,8 @@
package net.helenus.mapping;

public enum ColumnType {
  PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;
  PARTITION_KEY,
  CLUSTERING_COLUMN,
  STATIC_COLUMN,
  COLUMN;
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -17,7 +18,6 @@ package net.helenus.mapping;

import java.util.Collection;
import java.util.List;

import net.helenus.core.cache.Facet;

public interface HelenusEntity {

@@ -35,4 +35,6 @@ public interface HelenusEntity {
  HelenusProperty getProperty(String name);

  List<Facet> getFacets();

  boolean isDraftable();
}

@@ -1,5 +1,6 @@
/*
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -16,5 +17,8 @@
package net.helenus.mapping;

public enum HelenusEntityType {
  TABLE, VIEW, TUPLE, UDT;
  TABLE,
  VIEW,
  TUPLE,
  UDT;
}

@@ -1,6 +1,7 @@
/*
 *
 * Copyright (C) 2015 The Helenus Authors
 * Copyright (C) 2015 The Casser Authors
 * Copyright (C) 2015-2018 The Helenus Authors
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at

@@ -15,23 +16,23 @@
 */
package net.helenus.mapping;

import java.lang.reflect.Method;
import java.util.*;

import org.apache.commons.lang3.ClassUtils;

import com.datastax.driver.core.DefaultMetadata;
import com.datastax.driver.core.Metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import java.lang.reflect.Method;
import java.util.*;
import javax.validation.ConstraintValidator;
import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet;
import net.helenus.mapping.annotation.*;
import net.helenus.mapping.validator.DistinctValidator;
import net.helenus.support.HelenusMappingException;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;

public final class HelenusMappingEntity implements HelenusEntity {

@@ -39,6 +40,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
  private final HelenusEntityType type;
  private final IdentityName name;
  private final boolean cacheable;
  private final boolean draftable;
  private final ImmutableMap<String, Method> methods;
  private final ImmutableMap<String, HelenusProperty> props;
  private final ImmutableList<HelenusProperty> orderedProps;

@@ -66,7 +68,8 @@ public final class HelenusMappingEntity implements HelenusEntity {
    }

    for (Class<?> c : ClassUtils.getAllInterfaces(iface)) {
      if (c.getDeclaredAnnotation(Table.class) != null || c.getDeclaredAnnotation(InheritedTable.class) != null) {
      if (c.getDeclaredAnnotation(Table.class) != null
          || c.getDeclaredAnnotation(InheritedTable.class) != null) {
        for (Method m : c.getDeclaredMethods()) {
          Method o = methods.get(m.getName());
          if (o != null) {

@@ -111,24 +114,60 @@ public final class HelenusMappingEntity implements HelenusEntity {
    // Caching
    cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));

    // Draft
    Class<?> draft;
    try {
      draft = Class.forName(iface.getName() + "$Draft");
    } catch (Exception ignored) {
      draft = null;
    }
    draftable = (draft != null);

    // Materialized view
    List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
    ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
    if (iface.getDeclaredAnnotation(MaterializedView.class) == null) {
      facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
    } else {
      facetsBuilder.add(
          new Facet("table", Helenus.entity(iface.getInterfaces()[0]).getName().toCql())
              .setFixed());
    }
    for (HelenusProperty prop : orderedProps) {
      switch (prop.getColumnType()) {
        case PARTITION_KEY :
        case CLUSTERING_COLUMN :
        case PARTITION_KEY:
        case CLUSTERING_COLUMN:
          primaryKeyProperties.add(prop);
          break;
        default :
        default:
          if (primaryKeyProperties != null && primaryKeyProperties.size() > 0) {
            facetsBuilder.add(new UnboundFacet(primaryKeyProperties));
            primaryKeyProperties = null;
          }
          Optional<IdentityName> optionalIndexName = prop.getIndexName();
          if (optionalIndexName.isPresent()) {
            UnboundFacet facet = new UnboundFacet(prop);
            for (ConstraintValidator<?, ?> constraint :
                MappingUtil.getValidators(prop.getGetterMethod())) {
              if (constraint instanceof DistinctValidator) {
                DistinctValidator validator = (DistinctValidator) constraint;
                String[] values = validator.constraintAnnotation.value();
                UnboundFacet facet;
                if (values != null && values.length >= 1 && !(StringUtils.isBlank(values[0]))) {
                  List<HelenusProperty> props = new ArrayList<HelenusProperty>(values.length + 1);
                  props.add(prop);
                  for (String value : values) {
                    for (HelenusProperty p : orderedProps) {
                      String name = p.getPropertyName();
                      if (name.equals(value) && !name.equals(prop.getPropertyName())) {
                        props.add(p);
                      }
                    }
                  }
                  facet = new UnboundFacet(props, validator.alone(), validator.combined());
                } else {
                  facet = new UnboundFacet(prop, validator.alone(), validator.combined());
                }
                facetsBuilder.add(facet);
                break;
              }
            }
          }
    }

@@ -138,6 +177,43 @@ public final class HelenusMappingEntity implements HelenusEntity {
    this.facets = facetsBuilder.build();
  }

  private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {

    switch (type) {
      case TABLE:
        return MappingUtil.getTableName(iface, true);

      case VIEW:
        return MappingUtil.getViewName(iface, true);

      case TUPLE:
        return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);

      case UDT:
        return MappingUtil.getUserDefinedTypeName(iface, true);
    }

    throw new HelenusMappingException("invalid entity type " + type + " in " + type);
  }

  private static HelenusEntityType autoDetectType(Class<?> iface) {

    Objects.requireNonNull(iface, "empty iface");

    if (null != iface.getDeclaredAnnotation(Table.class)) {
      return HelenusEntityType.TABLE;
    } else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
      return HelenusEntityType.VIEW;
    } else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
      return HelenusEntityType.TUPLE;
    } else if (null != iface.getDeclaredAnnotation(UDT.class)) {
      return HelenusEntityType.UDT;
    }

    throw new HelenusMappingException(
        "entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
  }
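A minimal, self-contained version of the annotation-driven detection order above, using a stand-in annotation so the snippet runs outside the library:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    public class DetectDemo {
      @Retention(RetentionPolicy.RUNTIME)
      @interface Table {}

      @Table
      interface Widget {}

      public static void main(String[] args) {
        Class<?> iface = Widget.class;
        // Mirrors the first branch of autoDetectType: @Table wins when present.
        String kind = iface.getDeclaredAnnotation(Table.class) != null ? "TABLE" : "UNKNOWN";
        System.out.println(kind); // TABLE
      }
    }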
|
||||
|
||||
@Override
|
||||
public HelenusEntityType getType() {
|
||||
return type;
|
||||
|
@ -148,6 +224,11 @@ public final class HelenusMappingEntity implements HelenusEntity {
|
|||
return cacheable;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDraftable() {
|
||||
return draftable;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getMappingInterface() {
|
||||
return iface;
|
||||
|
@@ -178,54 +259,18 @@ public final class HelenusMappingEntity implements HelenusEntity {
     return name;
   }
 
-  private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
-
-    switch (type) {
-      case TABLE :
-        return MappingUtil.getTableName(iface, true);
-
-      case VIEW :
-        return MappingUtil.getViewName(iface, true);
-
-      case TUPLE :
-        return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
-
-      case UDT :
-        return MappingUtil.getUserDefinedTypeName(iface, true);
-    }
-
-    throw new HelenusMappingException("invalid entity type " + type + " in " + type);
-  }
-
-  private static HelenusEntityType autoDetectType(Class<?> iface) {
-
-    Objects.requireNonNull(iface, "empty iface");
-
-    if (null != iface.getDeclaredAnnotation(Table.class)) {
-      return HelenusEntityType.TABLE;
-    } else if (null != iface.getDeclaredAnnotation(MaterializedView.class)) {
-      return HelenusEntityType.VIEW;
-    } else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
-      return HelenusEntityType.TUPLE;
-    } else if (null != iface.getDeclaredAnnotation(UDT.class)) {
-      return HelenusEntityType.UDT;
-    }
-
-    throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
-  }
-
   private void validateOrdinals() {
 
     switch (getType()) {
-      case TABLE :
+      case TABLE:
         validateOrdinalsForTable();
         break;
 
-      case TUPLE :
+      case TUPLE:
         validateOrdinalsInTuple();
         break;
 
-      default :
+      default:
         break;
     }
   }
@@ -242,24 +287,29 @@ public final class HelenusMappingEntity implements HelenusEntity {
     int ordinal = prop.getOrdinal();
 
     switch (type) {
-      case PARTITION_KEY :
+      case PARTITION_KEY:
         if (partitionKeys.get(ordinal)) {
           throw new HelenusMappingException(
-              "detected two or more partition key columns with the same ordinal " + ordinal + " in "
+              "detected two or more partition key columns with the same ordinal "
+                  + ordinal
+                  + " in "
                   + prop.getEntity());
         }
         partitionKeys.set(ordinal);
         break;
 
-      case CLUSTERING_COLUMN :
+      case CLUSTERING_COLUMN:
         if (clusteringColumns.get(ordinal)) {
-          throw new HelenusMappingException("detected two or clustering columns with the same ordinal "
-              + ordinal + " in " + prop.getEntity());
+          throw new HelenusMappingException(
+              "detected two or more clustering columns with the same ordinal "
+                  + ordinal
+                  + " in "
+                  + prop.getEntity());
         }
         clusteringColumns.set(ordinal);
         break;
 
-      default :
+      default:
         break;
     }
   }
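These checks guarantee that partition keys and clustering columns each use distinct ordinals, since the ordinal fixes a column's position within the primary key. A hypothetical mapping the check rejects, assuming the ordinal attribute on the key annotation as used elsewhere in the mapping layer:

    @Table
    public interface BadKeys {
      @PartitionKey(ordinal = 0)
      UUID a();

      @PartitionKey(ordinal = 0)  // duplicate ordinal 0 -> HelenusMappingException
      UUID b();
    }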
@@ -268,17 +318,27 @@ public final class HelenusMappingEntity implements HelenusEntity {
   private void validateOrdinalsInTuple() {
     boolean[] ordinals = new boolean[props.size()];
 
-    getOrderedProperties().forEach(p -> {
+    getOrderedProperties()
+        .forEach(
+            p -> {
               int ordinal = p.getOrdinal();
 
               if (ordinal < 0 || ordinal >= ordinals.length) {
-                throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
-                    + p.getPropertyName() + " in " + p.getEntity());
+                throw new HelenusMappingException(
+                    "invalid ordinal "
+                        + ordinal
+                        + " found for property "
+                        + p.getPropertyName()
+                        + " in "
+                        + p.getEntity());
               }
 
               if (ordinals[ordinal]) {
                 throw new HelenusMappingException(
-                    "detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
+                    "detected two or more properties with the same ordinal "
+                        + ordinal
+                        + " in "
+                        + p.getEntity());
               }
 
               ordinals[ordinal] = true;
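For tuples the rule is stricter than for tables: every ordinal must fall in [0, props.size()) and appear exactly once, so the ordinals form the dense sequence 0..n-1 that positions each field in the CQL tuple. A hypothetical example, assuming an ordinal attribute on @Column (attribute name assumed for illustration):

    @Tuple
    public interface Point {
      @Column(ordinal = 0) double x();
      @Column(ordinal = 1) double y();
      @Column(ordinal = 3) double z();  // 3 >= props.size() -> "invalid ordinal" exception
    }

Using ordinals 0, 1, 1 instead would pass the bounds check but trip the duplicate check.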
@@ -295,8 +355,12 @@ public final class HelenusMappingEntity implements HelenusEntity {
   public String toString() {
 
     StringBuilder str = new StringBuilder();
-    str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
-        .append(type.name().toLowerCase()).append(":\n");
+    str.append(iface.getSimpleName())
+        .append("(")
+        .append(name.getName())
+        .append(") ")
+        .append(type.name().toLowerCase())
+        .append(":\n");
 
     for (HelenusProperty prop : getOrderedProperties()) {
       str.append(prop.toString());
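The reflow above changes only layout, not output: for a table-mapped interface the builder still yields a first line of the form (entity name hypothetical)

    Account(account) table:

followed by one line per property from prop.toString().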
@@ -1,5 +1,6 @@
 /*
- * Copyright (C) 2015 The Helenus Authors
+ * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015-2018 The Helenus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,16 +16,13 @@
  */
 package net.helenus.mapping;
 
+import com.datastax.driver.core.Metadata;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
 import java.lang.reflect.Type;
 import java.util.Optional;
 import java.util.function.Function;
-
 import javax.validation.ConstraintValidator;
-
-import com.datastax.driver.core.Metadata;
-
 import net.helenus.core.SessionRepository;
 import net.helenus.mapping.javatype.AbstractJavaType;
 import net.helenus.mapping.javatype.MappingJavaTypes;
@@ -38,6 +36,7 @@ public final class HelenusMappingProperty implements HelenusProperty {
   private final String propertyName;
   private final Optional<IdentityName> indexName;
   private final boolean caseSensitiveIndex;
+  private final boolean idempotent;
 
   private final ColumnInformation columnInfo;
 
@@ -45,12 +44,10 @@ public final class HelenusMappingProperty implements HelenusProperty {
   private final Class<?> javaType;
   private final AbstractJavaType abstractJavaType;
   private final AbstractDataType dataType;
 
+  private final ConstraintValidator<? extends Annotation, ?>[] validators;
   private volatile Optional<Function<Object, Object>> readConverter = null;
   private volatile Optional<Function<Object, Object>> writeConverter = null;
 
-  private final ConstraintValidator<? extends Annotation, ?>[] validators;
-
   public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
     this.entity = entity;
     this.getter = getter;
@@ -61,12 +58,22 @@ public final class HelenusMappingProperty implements HelenusProperty {
 
     this.columnInfo = new ColumnInformation(getter);
 
+    switch (this.columnInfo.getColumnType()) {
+      case PARTITION_KEY:
+      case CLUSTERING_COLUMN:
+        this.idempotent = true;
+        break;
+      default:
+        this.idempotent = MappingUtil.idempotent(getter);
+    }
+
     this.genericJavaType = getter.getGenericReturnType();
     this.javaType = getter.getReturnType();
     this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
 
-    this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
-        this.columnInfo.getColumnType(), metadata);
+    this.dataType =
+        abstractJavaType.resolveDataType(
+            this.getter, this.genericJavaType, this.columnInfo.getColumnType(), metadata);
 
     this.validators = MappingUtil.getValidators(getter);
   }
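Two notes on this hunk: partition-key and clustering columns are unconditionally marked idempotent, while other columns defer to MappingUtil.idempotent(getter); the resolveDataType call is only reflowed, not changed. The idempotency flag matters downstream because idempotent statements are safe for the driver to retry or speculatively execute. A sketch of how a caller might consume it via the DataStax driver's Statement API (the wiring shown is illustrative, not lifted from this diff):

    // com.datastax.driver.core.Statement#setIdempotent tells the driver the
    // statement can be retried or speculatively executed without side effects.
    statement.setIdempotent(prop.isIdempotent());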
@@ -116,6 +123,11 @@ public final class HelenusMappingProperty implements HelenusProperty {
     return caseSensitiveIndex;
   }
 
+  @Override
+  public boolean isIdempotent() {
+    return idempotent;
+  }
+
   @Override
   public String getPropertyName() {
     return propertyName;
@@ -170,13 +182,13 @@ public final class HelenusMappingProperty implements HelenusProperty {
     ColumnType type = this.getColumnType();
 
     switch (type) {
-      case PARTITION_KEY :
+      case PARTITION_KEY:
         str.append("partition_key[");
         str.append(this.getOrdinal());
         str.append("] ");
         break;
 
-      case CLUSTERING_COLUMN :
+      case CLUSTERING_COLUMN:
         str.append("clustering_column[");
         str.append(this.getOrdinal());
         str.append("] ");
@@ -186,11 +198,11 @@ public final class HelenusMappingProperty implements HelenusProperty {
         }
         break;
 
-      case STATIC_COLUMN :
+      case STATIC_COLUMN:
         str.append("static ");
         break;
 
-      case COLUMN :
+      case COLUMN:
         break;
     }
 