Merge branch 'develop'

This commit is contained in:
Greg Burd 2018-03-28 08:29:06 -04:00
commit 166368c9dc
75 changed files with 3612 additions and 2200 deletions

17
NOTES
View file

@ -22,6 +22,14 @@ Operation/
`-- PreparedStreamOperation `-- PreparedStreamOperation
----
@CompoundIndex()
create a new column in the same table called __idx_a_b_c, in which the hash of the concatenated values (in that order) is stored; then create a normal index for that column (CREATE INDEX ...)
if a query matches that set of columns, use that indexed column to fetch the desired results from the table
this could also work with an .in() query if a materialized view exists
----
// TODO(gburd): create a statement that matches one that wasn't prepared // TODO(gburd): create a statement that matches one that wasn't prepared
//String key = //String key =
// "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString(); // "use " + preparedStatement.getQueryKeyspace() + "; " + preparedStatement.getQueryString();
@ -183,3 +191,12 @@ InsertOperation
if (resultType == iface) { if (resultType == iface) {
if (values.size() > 0 && includesNonIdentityValues) { if (values.size() > 0 && includesNonIdentityValues) {
boolean immutable = iface.isAssignableFrom(Drafted.class); boolean immutable = iface.isAssignableFrom(Drafted.class);
-------------------
final Object value;
if (method.getParameterCount() == 1 && args[0] instanceof Boolean && src instanceof ValueProviderMap) {
value = ((ValueProviderMap)src).get(methodName, (Boolean)args[0]);
} else {
value = src.get(methodName);
}
--------------------

View file

@ -1,3 +1,3 @@
#!/bin/bash #!/usr/bin/env bash
mvn clean jar:jar javadoc:jar source:jar deploy -Prelease mvn clean jar:jar javadoc:jar source:jar deploy -Prelease

View file

@ -1,7 +1,14 @@
#!/bin/bash #!/usr/bin/env bash
if [ "X$1" == "Xall" ]; then
for f in $(find ./src -name \*.java); do for f in $(find ./src -name \*.java); do
echo Formatting $f echo Formatting $f
java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $f java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $f
done done
else
for file in $(git status --short | awk '{print $2}'); do
echo $file
java -jar ./lib/google-java-format-1.3-all-deps.jar --replace $file
done
fi

View file

@ -1,3 +1,3 @@
#!/bin/bash #!/usr/bin/env bash
mvn clean jar:jar javadoc:jar source:jar install -Prelease mvn clean jar:jar javadoc:jar source:jar install -Prelease

View file

@ -1,90 +0,0 @@
// gradle wrapper
// ./gradlew clean generateLock saveLock
// ./gradlew compileJava
// ./gradlew run
// ./gradlew run --debug-jvm
// ./gradlew publishToMavenLocal
buildscript {
ext {}
repositories {
jcenter()
mavenLocal()
mavenCentral()
maven { url "https://clojars.org/repo" }
maven { url "https://plugins.gradle.org/m2/" }
}
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:4.+'
classpath 'com.uber:okbuck:0.19.0'
}
}
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'eclipse'
apply plugin: 'java-library'
apply plugin: 'maven-publish'
apply plugin: 'com.uber.okbuck'
apply plugin: 'nebula.dependency-lock'
task wrapper(type: Wrapper) {
gradleVersion = '4.0.2'
}
jar {
baseName = 'helenus'
group = 'net.helenus'
version = '2.0.17-SNAPSHOT'
}
description = """helenus"""
sourceCompatibility = 1.8
targetCompatibility = 1.8
tasks.withType(JavaCompile) {
options.encoding = 'UTF-8'
}
configurations.all {
}
repositories {
jcenter()
mavenLocal()
mavenCentral()
maven { url "file:///Users/gburd/ws/helenus/lib" }
maven { url "https://oss.sonatype.org/content/repositories/snapshots" }
maven { url "http://repo.maven.apache.org/maven2" }
}
dependencies {
compile group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.3.0'
compile group: 'org.aspectj', name: 'aspectjrt', version: '1.8.10'
compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.8.10'
compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.6'
compile group: 'org.springframework', name: 'spring-core', version: '4.3.10.RELEASE'
compile group: 'com.google.guava', name: 'guava', version: '20.0'
compile group: 'com.diffplug.durian', name: 'durian', version: '3.+'
compile group: 'io.zipkin.java', name: 'zipkin', version: '1.29.2'
compile group: 'io.zipkin.brave', name: 'brave', version: '4.0.6'
compile group: 'io.dropwizard.metrics', name: 'metrics-core', version: '3.2.2'
compile group: 'javax.validation', name: 'validation-api', version: '2.0.0.CR3'
compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.1'
runtime group: 'org.slf4j', name: 'jcl-over-slf4j', version: '1.7.1'
testCompile group: 'org.codehaus.jackson', name: 'jackson-mapper-asl', version: '1.9.13'
testCompile group: 'com.anthemengineering.mojo', name: 'infer-maven-plugin', version: '0.1.0'
testCompile group: 'org.codehaus.jackson', name: 'jackson-core-asl', version: '1.9.13'
testCompile(group: 'org.cassandraunit', name: 'cassandra-unit', version: '3.1.4.0-SNAPSHOT') {
exclude(module: 'cassandra-driver-core')
}
testCompile group: 'org.apache.cassandra', name: 'cassandra-all', version: '3.11.0'
testCompile group: 'commons-io', name: 'commons-io', version: '2.5'
testCompile group: 'junit', name: 'junit', version: '4.12'
testCompile group: 'com.github.stephenc', name: 'jamm', version: '0.2.5'
testCompile group: 'org.hamcrest', name: 'hamcrest-library', version: '1.3'
testCompile group: 'org.hamcrest', name: 'hamcrest-core', version: '1.3'
testCompile group: 'org.mockito', name: 'mockito-core', version: '2.8.47'
}

View file

@ -1,648 +0,0 @@
{
"compile": {
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.google.guava:guava": {
"locked": "20.0",
"requested": "20.0"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"compileClasspath": {
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.google.guava:guava": {
"locked": "20.0",
"requested": "20.0"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"default": {
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.google.guava:guava": {
"locked": "20.0",
"requested": "20.0"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.slf4j:jcl-over-slf4j": {
"locked": "1.7.1",
"requested": "1.7.1"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"runtime": {
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.google.guava:guava": {
"locked": "20.0",
"requested": "20.0"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.slf4j:jcl-over-slf4j": {
"locked": "1.7.1",
"requested": "1.7.1"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"runtimeClasspath": {
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.google.guava:guava": {
"locked": "20.0",
"requested": "20.0"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.slf4j:jcl-over-slf4j": {
"locked": "1.7.1",
"requested": "1.7.1"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"testCompile": {
"com.anthemengineering.mojo:infer-maven-plugin": {
"locked": "0.1.0",
"requested": "0.1.0"
},
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.github.stephenc:jamm": {
"locked": "0.2.5",
"requested": "0.2.5"
},
"com.google.guava:guava": {
"locked": "21.0",
"requested": "20.0"
},
"commons-io:commons-io": {
"locked": "2.5",
"requested": "2.5"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"junit:junit": {
"locked": "4.12",
"requested": "4.12"
},
"org.apache.cassandra:cassandra-all": {
"locked": "3.11.0",
"requested": "3.11.0"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.cassandraunit:cassandra-unit": {
"locked": "3.1.4.0-SNAPSHOT",
"requested": "3.1.4.0-SNAPSHOT"
},
"org.codehaus.jackson:jackson-core-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.codehaus.jackson:jackson-mapper-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.hamcrest:hamcrest-core": {
"locked": "1.3",
"requested": "1.3"
},
"org.hamcrest:hamcrest-library": {
"locked": "1.3",
"requested": "1.3"
},
"org.mockito:mockito-core": {
"locked": "2.8.47",
"requested": "2.8.47"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"testCompileClasspath": {
"com.anthemengineering.mojo:infer-maven-plugin": {
"locked": "0.1.0",
"requested": "0.1.0"
},
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.github.stephenc:jamm": {
"locked": "0.2.5",
"requested": "0.2.5"
},
"com.google.guava:guava": {
"locked": "21.0",
"requested": "20.0"
},
"commons-io:commons-io": {
"locked": "2.5",
"requested": "2.5"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"junit:junit": {
"locked": "4.12",
"requested": "4.12"
},
"org.apache.cassandra:cassandra-all": {
"locked": "3.11.0",
"requested": "3.11.0"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.cassandraunit:cassandra-unit": {
"locked": "3.1.4.0-SNAPSHOT",
"requested": "3.1.4.0-SNAPSHOT"
},
"org.codehaus.jackson:jackson-core-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.codehaus.jackson:jackson-mapper-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.hamcrest:hamcrest-core": {
"locked": "1.3",
"requested": "1.3"
},
"org.hamcrest:hamcrest-library": {
"locked": "1.3",
"requested": "1.3"
},
"org.mockito:mockito-core": {
"locked": "2.8.47",
"requested": "2.8.47"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"testRuntime": {
"com.anthemengineering.mojo:infer-maven-plugin": {
"locked": "0.1.0",
"requested": "0.1.0"
},
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.github.stephenc:jamm": {
"locked": "0.2.5",
"requested": "0.2.5"
},
"com.google.guava:guava": {
"locked": "21.0",
"requested": "20.0"
},
"commons-io:commons-io": {
"locked": "2.5",
"requested": "2.5"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"junit:junit": {
"locked": "4.12",
"requested": "4.12"
},
"org.apache.cassandra:cassandra-all": {
"locked": "3.11.0",
"requested": "3.11.0"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.cassandraunit:cassandra-unit": {
"locked": "3.1.4.0-SNAPSHOT",
"requested": "3.1.4.0-SNAPSHOT"
},
"org.codehaus.jackson:jackson-core-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.codehaus.jackson:jackson-mapper-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.hamcrest:hamcrest-core": {
"locked": "1.3",
"requested": "1.3"
},
"org.hamcrest:hamcrest-library": {
"locked": "1.3",
"requested": "1.3"
},
"org.mockito:mockito-core": {
"locked": "2.8.47",
"requested": "2.8.47"
},
"org.slf4j:jcl-over-slf4j": {
"locked": "1.7.7",
"requested": "1.7.1"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
},
"testRuntimeClasspath": {
"com.anthemengineering.mojo:infer-maven-plugin": {
"locked": "0.1.0",
"requested": "0.1.0"
},
"com.datastax.cassandra:cassandra-driver-core": {
"locked": "3.3.0",
"requested": "3.3.0"
},
"com.diffplug.durian:durian": {
"locked": "3.5.0-SNAPSHOT",
"requested": "3.+"
},
"com.github.stephenc:jamm": {
"locked": "0.2.5",
"requested": "0.2.5"
},
"com.google.guava:guava": {
"locked": "21.0",
"requested": "20.0"
},
"commons-io:commons-io": {
"locked": "2.5",
"requested": "2.5"
},
"io.dropwizard.metrics:metrics-core": {
"locked": "3.2.2",
"requested": "3.2.2"
},
"io.zipkin.brave:brave": {
"locked": "4.0.6",
"requested": "4.0.6"
},
"io.zipkin.java:zipkin": {
"locked": "1.29.2",
"requested": "1.29.2"
},
"javax.validation:validation-api": {
"locked": "2.0.0.CR3",
"requested": "2.0.0.CR3"
},
"junit:junit": {
"locked": "4.12",
"requested": "4.12"
},
"org.apache.cassandra:cassandra-all": {
"locked": "3.11.0",
"requested": "3.11.0"
},
"org.apache.commons:commons-lang3": {
"locked": "3.6",
"requested": "3.6"
},
"org.aspectj:aspectjrt": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.aspectj:aspectjweaver": {
"locked": "1.8.10",
"requested": "1.8.10"
},
"org.cassandraunit:cassandra-unit": {
"locked": "3.1.4.0-SNAPSHOT",
"requested": "3.1.4.0-SNAPSHOT"
},
"org.codehaus.jackson:jackson-core-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.codehaus.jackson:jackson-mapper-asl": {
"locked": "1.9.13",
"requested": "1.9.13"
},
"org.hamcrest:hamcrest-core": {
"locked": "1.3",
"requested": "1.3"
},
"org.hamcrest:hamcrest-library": {
"locked": "1.3",
"requested": "1.3"
},
"org.mockito:mockito-core": {
"locked": "2.8.47",
"requested": "2.8.47"
},
"org.slf4j:jcl-over-slf4j": {
"locked": "1.7.7",
"requested": "1.7.1"
},
"org.slf4j:slf4j-api": {
"locked": "1.7.25",
"requested": "1.7.1"
},
"org.springframework:spring-core": {
"locked": "4.3.10.RELEASE",
"requested": "4.3.10.RELEASE"
}
}
}

View file

@ -11,7 +11,7 @@
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.0" level="project" /> <orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.2" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.47.Final" level="project" /> <orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.47.Final" level="project" /> <orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-common:4.0.47.Final" level="project" /> <orderEntry type="library" name="Maven: io.netty:netty-common:4.0.47.Final" level="project" />
@ -28,16 +28,14 @@
<orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-extras:3.3.2" level="project" />
<orderEntry type="library" name="Maven: com.diffplug.durian:durian:3.4.0" level="project" /> <orderEntry type="library" name="Maven: com.diffplug.durian:durian:3.4.0" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.8.10" level="project" /> <orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.8.10" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.6" level="project" /> <orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.6" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.10.RELEASE" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.10.RELEASE" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" /> <orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.1.0" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:20.0" level="project" /> <orderEntry type="library" name="Maven: com.google.guava:guava:20.0" level="project" />
<orderEntry type="library" name="Maven: io.zipkin.java:zipkin:1.29.2" level="project" />
<orderEntry type="library" name="Maven: io.zipkin.brave:brave:4.0.6" level="project" />
<orderEntry type="library" name="Maven: io.zipkin.reporter:zipkin-reporter:0.6.12" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" /> <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
<orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.0.CR3" level="project" /> <orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.0.CR3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
@ -116,9 +114,9 @@
<orderEntry type="library" scope="TEST" name="Maven: org.caffinitas.ohc:ohc-core:0.4.4" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.caffinitas.ohc:ohc-core:0.4.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:caffeine:2.2.6" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:caffeine:2.2.6" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.jctools:jctools-core:1.2.1" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.jctools:jctools-core:1.2.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: ca.exprofesso:guava-jcache:1.0.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: commons-io:commons-io:2.5" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: commons-io:commons-io:2.5" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.stephenc:jamm:0.2.5" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.8.47" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.8.47" level="project" />

60
pom.xml
View file

@ -109,7 +109,13 @@
<dependency> <dependency>
<groupId>com.datastax.cassandra</groupId> <groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId> <artifactId>cassandra-driver-core</artifactId>
<version>3.3.0</version> <version>3.3.2</version>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-extras</artifactId>
<version>3.3.2</version>
</dependency> </dependency>
<dependency> <dependency>
@ -118,12 +124,6 @@
<version>3.4.0</version> <version>3.4.0</version>
</dependency> </dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.8.10</version>
</dependency>
<dependency> <dependency>
<groupId>org.aspectj</groupId> <groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId> <artifactId>aspectjweaver</artifactId>
@ -142,25 +142,19 @@
<version>4.3.10.RELEASE</version> <version>4.3.10.RELEASE</version>
</dependency> </dependency>
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
<version>1.1.0</version>
</dependency>
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
<version>20.0</version> <version>20.0</version>
</dependency> </dependency>
<!-- Metrics and tracing --> <!-- Metrics -->
<dependency>
<groupId>io.zipkin.java</groupId>
<artifactId>zipkin</artifactId>
<version>1.29.2</version>
</dependency>
<dependency>
<groupId>io.zipkin.brave</groupId>
<artifactId>brave</artifactId>
<version>4.0.6</version>
</dependency>
<dependency> <dependency>
<groupId>io.dropwizard.metrics</groupId> <groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId> <artifactId>metrics-core</artifactId>
@ -217,6 +211,24 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>ca.exprofesso</groupId>
<artifactId>guava-jcache</artifactId>
<version>1.0.4</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>
@ -231,13 +243,6 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>com.github.stephenc</groupId>
<artifactId>jamm</artifactId>
<version>0.2.5</version>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.hamcrest</groupId> <groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId> <artifactId>hamcrest-library</artifactId>
@ -272,7 +277,6 @@
<version>1.7.1</version> <version>1.7.1</version>
<scope>runtime</scope> <scope>runtime</scope>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>

View file

@ -1 +0,0 @@
rootProject.name = 'helenus-core'

View file

@ -5,7 +5,7 @@ import com.datastax.driver.core.querybuilder.Select;
public class CreateMaterializedView extends Create { public class CreateMaterializedView extends Create {
private String viewName; private final String viewName;
private Select.Where selection; private Select.Where selection;
private String primaryKey; private String primaryKey;
private String clustering; private String clustering;

View file

@ -33,6 +33,6 @@ public abstract class AbstractAuditedEntityDraft<E> extends AbstractEntityDraft<
} }
public Date createdAt() { public Date createdAt() {
return (Date) get("createdAt", Date.class); return get("createdAt", Date.class);
} }
} }

View file

@ -2,22 +2,36 @@ package net.helenus.core;
import com.google.common.primitives.Primitives; import com.google.common.primitives.Primitives;
import java.io.Serializable; import java.io.Serializable;
import java.util.*; import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import net.helenus.core.reflect.DefaultPrimitiveTypes; import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted; import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.MapExportable; import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil; import net.helenus.mapping.MappingUtil;
import org.apache.commons.lang3.SerializationUtils; import org.apache.commons.lang3.SerializationUtils;
public abstract class AbstractEntityDraft<E> implements Drafted<E> { public abstract class AbstractEntityDraft<E> implements Drafted<E> {
private final Map<String, Object> backingMap = new HashMap<String, Object>();
private final MapExportable entity; private final MapExportable entity;
private final Map<String, Object> entityMap; private final Map<String, Object> valuesMap;
private final Set<String> readSet;
private final Map<String, Object> mutationsMap = new HashMap<String, Object>();
public AbstractEntityDraft(MapExportable entity) { public AbstractEntityDraft(MapExportable entity) {
this.entity = entity; this.entity = entity;
this.entityMap = entity != null ? entity.toMap() : new HashMap<String, Object>(); // Entities can mutate their map.
if (entity != null) {
this.valuesMap = entity.toMap(true);
this.readSet = entity.toReadSet();
} else {
this.valuesMap = new HashMap<String, Object>();
this.readSet = new HashSet<String>();
}
} }
public abstract Class<E> getEntityClass(); public abstract Class<E> getEntityClass();
@ -33,10 +47,11 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T> T get(String key, Class<?> returnType) { public <T> T get(String key, Class<?> returnType) {
T value = (T) backingMap.get(key); readSet.add(key);
T value = (T) mutationsMap.get(key);
if (value == null) { if (value == null) {
value = (T) entityMap.get(key); value = (T) valuesMap.get(key);
if (value == null) { if (value == null) {
if (Primitives.allPrimitiveTypes().contains(returnType)) { if (Primitives.allPrimitiveTypes().contains(returnType)) {
@ -49,23 +64,28 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
return (T) type.getDefaultValue(); return (T) type.getDefaultValue();
} }
} else { } else {
// Collections fetched from the entityMap // Collections fetched from the valuesMap
if (value instanceof Collection) { if (value instanceof Collection) {
try {
value = MappingUtil.<T>clone(value);
} catch (CloneNotSupportedException e) {
// TODO(gburd): deep?shallow? copy of List, Map, Set to a mutable collection.
value = (T) SerializationUtils.<Serializable>clone((Serializable) value); value = (T) SerializationUtils.<Serializable>clone((Serializable) value);
} }
} }
} }
}
return value; return value;
} }
public <T> Object set(Getter<T> getter, Object value) { public <T> Object set(Getter<T> getter, Object value) {
return set(this.<T>methodNameFor(getter), value); HelenusProperty prop = MappingUtil.resolveMappingProperty(getter).getProperty();
String key = prop.getPropertyName();
HelenusValidator.INSTANCE.validate(prop, value);
if (key == null || value == null) {
return null;
}
mutationsMap.put(key, value);
return value;
} }
public Object set(String key, Object value) { public Object set(String key, Object value) {
@ -73,37 +93,35 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
return null; return null;
} }
backingMap.put(key, value); mutationsMap.put(key, value);
return value; return value;
} }
public void put(String key, Object value) {
mutationsMap.put(key, value);
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T> T mutate(Getter<T> getter, T value) { public <T> T mutate(Getter<T> getter, T value) {
return (T) mutate(this.<T>methodNameFor(getter), value); return (T) mutate(this.<T>methodNameFor(getter), value);
} }
public Object mutate(String key, Object value) { public <T> T mutate(String key, T value) {
Objects.requireNonNull(key); Objects.requireNonNull(key);
if (value == null) { if (value != null) {
return null;
}
if (entity != null) { if (entity != null) {
Map<String, Object> map = entity.toMap(); T currentValue = this.<T>fetch(key);
if (!value.equals(currentValue)) {
if (map.containsKey(key) && !value.equals(map.get(key))) { mutationsMap.put(key, value);
backingMap.put(key, value);
return value; return value;
} }
return map.get(key);
} else { } else {
backingMap.put(key, value); mutationsMap.put(key, value);
return null;
} }
} }
return null;
}
private <T> String methodNameFor(Getter<T> getter) { private <T> String methodNameFor(Getter<T> getter) {
return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName(); return MappingUtil.resolveMappingProperty(getter).getProperty().getPropertyName();
@ -115,8 +133,8 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
public Object unset(String key) { public Object unset(String key) {
if (key != null) { if (key != null) {
Object value = backingMap.get(key); Object value = mutationsMap.get(key);
backingMap.put(key, null); mutationsMap.put(key, null);
return value; return value;
} }
return null; return null;
@ -126,10 +144,18 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
return this.<T>reset(this.<T>methodNameFor(getter), desiredValue); return this.<T>reset(this.<T>methodNameFor(getter), desiredValue);
} }
private <T> T fetch(String key) {
T value = (T) mutationsMap.get(key);
if (value == null) {
value = (T) valuesMap.get(key);
}
return value;
}
public <T> boolean reset(String key, T desiredValue) { public <T> boolean reset(String key, T desiredValue) {
if (key != null && desiredValue != null) { if (key != null && desiredValue != null) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
T currentValue = (T) backingMap.get(key); T currentValue = (T) this.<T>fetch(key);
if (currentValue == null || !currentValue.equals(desiredValue)) { if (currentValue == null || !currentValue.equals(desiredValue)) {
set(key, desiredValue); set(key, desiredValue);
return true; return true;
@ -140,7 +166,7 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
@Override @Override
public Map<String, Object> toMap() { public Map<String, Object> toMap() {
return toMap(entityMap); return toMap(valuesMap);
} }
public Map<String, Object> toMap(Map<String, Object> entityMap) { public Map<String, Object> toMap(Map<String, Object> entityMap) {
@ -151,21 +177,26 @@ public abstract class AbstractEntityDraft<E> implements Drafted<E> {
combined.put(e.getKey(), e.getValue()); combined.put(e.getKey(), e.getValue());
} }
} else { } else {
combined = new HashMap<String, Object>(backingMap.size()); combined = new HashMap<String, Object>(mutationsMap.size());
} }
for (String key : mutated()) { for (String key : mutated()) {
combined.put(key, backingMap.get(key)); combined.put(key, mutationsMap.get(key));
} }
return combined; return combined;
} }
@Override @Override
public Set<String> mutated() { public Set<String> mutated() {
return backingMap.keySet(); return mutationsMap.keySet();
}
@Override
public Set<String> read() {
return readSet;
} }
@Override @Override
public String toString() { public String toString() {
return backingMap.toString(); return mutationsMap.toString();
} }
} }

View file

@ -15,7 +15,6 @@
*/ */
package net.helenus.core; package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*; import com.datastax.driver.core.*;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
@ -25,7 +24,6 @@ import java.io.PrintStream;
import java.util.List; import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.operation.Operation;
import net.helenus.mapping.value.ColumnValuePreparer; import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider; import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.Either; import net.helenus.support.Either;
@ -43,6 +41,8 @@ public abstract class AbstractSessionOperations {
public abstract boolean isShowCql(); public abstract boolean isShowCql();
public abstract boolean showValues();
public abstract PrintStream getPrintStream(); public abstract PrintStream getPrintStream();
public abstract Executor getExecutor(); public abstract Executor getExecutor();
@ -59,7 +59,6 @@ public abstract class AbstractSessionOperations {
public PreparedStatement prepare(RegularStatement statement) { public PreparedStatement prepare(RegularStatement statement) {
try { try {
logStatement(statement, false);
return currentSession().prepare(statement); return currentSession().prepare(statement);
} catch (RuntimeException e) { } catch (RuntimeException e) {
throw translateException(e); throw translateException(e);
@ -68,64 +67,48 @@ public abstract class AbstractSessionOperations {
public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) { public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
try { try {
logStatement(statement, false);
return currentSession().prepareAsync(statement); return currentSession().prepareAsync(statement);
} catch (RuntimeException e) { } catch (RuntimeException e) {
throw translateException(e); throw translateException(e);
} }
} }
public ResultSet execute(Statement statement, boolean showValues) { public ResultSet execute(Statement statement) {
return execute(statement, null, null, showValues); return execute(statement, null, null);
} }
public ResultSet execute(Statement statement, Stopwatch timer, boolean showValues) { public ResultSet execute(Statement statement, Stopwatch timer) {
return execute(statement, null, timer, showValues); return execute(statement, null, timer);
} }
public ResultSet execute(Statement statement, UnitOfWork uow, boolean showValues) { public ResultSet execute(Statement statement, UnitOfWork uow) {
return execute(statement, uow, null, showValues); return execute(statement, uow, null);
} }
public ResultSet execute( public ResultSet execute(Statement statement, UnitOfWork uow, Stopwatch timer) {
Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) { return executeAsync(statement, uow, timer).getUninterruptibly();
return executeAsync(statement, uow, timer, showValues).getUninterruptibly();
} }
public ResultSetFuture executeAsync(Statement statement, boolean showValues) { public ResultSetFuture executeAsync(Statement statement) {
return executeAsync(statement, null, null, showValues); return executeAsync(statement, null, null);
} }
public ResultSetFuture executeAsync(Statement statement, Stopwatch timer, boolean showValues) { public ResultSetFuture executeAsync(Statement statement, Stopwatch timer) {
return executeAsync(statement, null, timer, showValues); return executeAsync(statement, null, timer);
} }
public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, boolean showValues) { public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow) {
return executeAsync(statement, uow, null, showValues); return executeAsync(statement, uow, null);
} }
public ResultSetFuture executeAsync( public ResultSetFuture executeAsync(Statement statement, UnitOfWork uow, Stopwatch timer) {
Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
try { try {
logStatement(statement, showValues);
return currentSession().executeAsync(statement); return currentSession().executeAsync(statement);
} catch (RuntimeException e) { } catch (RuntimeException e) {
throw translateException(e); throw translateException(e);
} }
} }
private void logStatement(Statement statement, boolean showValues) {
if (isShowCql()) {
printCql(Operation.queryString(statement, showValues));
} else if (LOG.isDebugEnabled()) {
LOG.info("CQL> " + Operation.queryString(statement, showValues));
}
}
public Tracer getZipkinTracer() {
return null;
}
public MetricRegistry getMetricRegistry() { public MetricRegistry getMetricRegistry() {
return null; return null;
} }
@ -145,9 +128,5 @@ public abstract class AbstractSessionOperations {
public void updateCache(Object pojo, List<Facet> facets) {} public void updateCache(Object pojo, List<Facet> facets) {}
void printCql(String cql) {
getPrintStream().println(cql);
}
public void cacheEvict(List<Facet> facets) {} public void cacheEvict(List<Facet> facets) {}
} }

View file

@ -1,407 +0,0 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core;
import static net.helenus.core.HelenusSession.deleted;
import com.diffplug.common.base.Errors;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.support.Either;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public abstract class AbstractUnitOfWork<E extends Exception>
implements UnitOfWork<E>, AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(AbstractUnitOfWork.class);
private final List<AbstractUnitOfWork<E>> nested = new ArrayList<>();
private final HelenusSession session;
private final AbstractUnitOfWork<E> parent;
private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
protected String purpose;
protected List<String> nestedPurposes = new ArrayList<String>();
protected String info;
protected int cacheHits = 0;
protected int cacheMisses = 0;
protected int databaseLookups = 0;
protected Stopwatch elapsedTime;
protected Map<String, Double> databaseTime = new HashMap<>();
protected double cacheLookupTime = 0.0;
private List<CommitThunk> postCommit = new ArrayList<CommitThunk>();
private boolean aborted = false;
private boolean committed = false;
protected AbstractUnitOfWork(HelenusSession session, AbstractUnitOfWork<E> parent) {
Objects.requireNonNull(session, "containing session cannot be null");
this.session = session;
this.parent = parent;
}
@Override
public void addDatabaseTime(String name, Stopwatch amount) {
Double time = databaseTime.get(name);
if (time == null) {
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
} else {
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
}
}
@Override
public void addCacheLookupTime(Stopwatch amount) {
cacheLookupTime += amount.elapsed(TimeUnit.MICROSECONDS);
}
@Override
public void addNestedUnitOfWork(UnitOfWork<E> uow) {
synchronized (nested) {
nested.add((AbstractUnitOfWork<E>) uow);
}
}
@Override
public synchronized UnitOfWork<E> begin() {
if (LOG.isInfoEnabled()) {
elapsedTime = Stopwatch.createStarted();
}
// log.record(txn::start)
return this;
}
@Override
public String getPurpose() {
return purpose;
}
@Override
public UnitOfWork setPurpose(String purpose) {
this.purpose = purpose;
return this;
}
@Override
public void setInfo(String info) {
this.info = info;
}
@Override
public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
if (cache > 0) {
cacheHits += cache;
} else {
cacheMisses += Math.abs(cache);
}
if (ops > 0) {
databaseLookups += ops;
}
}
public String logTimers(String what) {
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double d = 0.0;
double c = cacheLookupTime / 1000.0;
double fc = (c / e) * 100.0;
String database = "";
if (databaseTime.size() > 0) {
List<String> dbt = new ArrayList<>(databaseTime.size());
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
double t = dt.getValue() / 1000.0;
d += t;
dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
}
double fd = (d / e) * 100.0;
database =
String.format(
", %d quer%s (%,.3fms %,2.2f%% - %s)",
databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
}
String cache = "";
if (cacheLookupTime > 0) {
int cacheLookups = cacheHits + cacheMisses;
cache =
String.format(
" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
}
String da = "";
if (databaseTime.size() > 0 || cacheLookupTime > 0) {
double dat = d + c;
double daf = (dat / e) * 100;
da =
String.format(
" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
}
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
String n =
nested
.stream()
.map(uow -> String.valueOf(uow.hashCode()))
.collect(Collectors.joining(", "));
String s =
String.format(
Locale.US,
"UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
hashCode(),
(nested.size() > 0 ? ", [" + n + "]" : ""),
what,
e,
cache,
database,
da,
(purpose == null ? "" : " " + purpose),
(nestedPurposes.isEmpty()) ? "" : ", " + x,
(info == null) ? "" : " " + info);
return s;
}
private void applyPostCommitFunctions() {
if (!postCommit.isEmpty()) {
for (CommitThunk f : postCommit) {
f.apply();
}
}
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("committed"));
}
}
@Override
public Optional<Object> cacheLookup(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
Optional<Object> result = Optional.empty();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
if (eitherValue != null) {
Object value = deleted;
if (eitherValue.isLeft()) {
value = eitherValue.getLeft();
}
result = Optional.of(value);
break;
}
}
}
if (!result.isPresent()) {
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
if (parent != null) {
return parent.cacheLookup(facets);
}
}
return result;
}
@Override
public List<Facet> cacheEvict(List<Facet> facets) {
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
String tableName = CacheUtil.schemaName(facets);
Optional<Object> optionalValue = cacheLookup(facets);
if (optionalValue.isPresent()) {
Object value = optionalValue.get();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnKey = facet.name() + "==" + facet.value();
// mark the value identified by the facet to `deleted`
cache.put(tableName, columnKey, deletedObjectFacets);
}
}
// look for other row/col pairs that referenced the same object, mark them
// `deleted`
cache
.columnKeySet()
.forEach(
columnKey -> {
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
if (eitherCachedValue.isLeft()) {
Object cachedValue = eitherCachedValue.getLeft();
if (cachedValue == value) {
cache.put(tableName, columnKey, deletedObjectFacets);
String[] parts = columnKey.split("==");
facets.add(new Facet<String>(parts[0], parts[1]));
}
}
});
}
return facets;
}
@Override
public void cacheUpdate(Object value, List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
cache.put(tableName, columnName, Either.left(value));
}
}
}
private Iterator<AbstractUnitOfWork<E>> getChildNodes() {
return nested.iterator();
}
/**
* Checks to see if the work performed between calling begin and now can be committed or not.
*
* @return a function from which to chain work that only happens when commit is successful
* @throws E when the work overlaps with other concurrent writers.
*/
public PostCommitFunction<Void, Void> commit() throws E {
// All nested UnitOfWork should be committed (not aborted) before calls to
// commit, check.
boolean canCommit = true;
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
for (AbstractUnitOfWork<E> uow : traverser.postOrderTraversal(this)) {
if (this != uow) {
canCommit &= (!uow.aborted && uow.committed);
}
}
// log.record(txn::provisionalCommit)
// examine log for conflicts in read-set and write-set between begin and
// provisional commit
// if (conflict) { throw new ConflictingUnitOfWorkException(this) }
// else return function so as to enable commit.andThen(() -> { do something iff
// commit was successful; })
if (canCommit) {
committed = true;
aborted = false;
nested.forEach((uow) -> Errors.rethrow().wrap(uow::commit));
elapsedTime.stop();
if (parent == null) {
// Apply all post-commit functions, this is the outter-most UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
uow.applyPostCommitFunctions();
});
// Merge our cache into the session cache.
session.mergeCache(cache);
return new PostCommitFunction(this, null);
} else {
// Merge cache and statistics into parent if there is one.
parent.mergeCache(cache);
if (purpose != null) {
parent.nestedPurposes.add(purpose);
}
parent.cacheHits += cacheHits;
parent.cacheMisses += cacheMisses;
parent.databaseLookups += databaseLookups;
parent.cacheLookupTime += cacheLookupTime;
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
String name = dt.getKey();
if (parent.databaseTime.containsKey(name)) {
double t = parent.databaseTime.get(name);
parent.databaseTime.put(name, t + dt.getValue());
} else {
parent.databaseTime.put(name, dt.getValue());
}
}
}
}
// else {
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
// T object = ctor.newInstance(new Object[] { String message });
// }
return new PostCommitFunction(this, postCommit);
}
/* Explicitly discard the work and mark it as as such in the log. */
public synchronized void abort() {
TreeTraverser<AbstractUnitOfWork<E>> traverser =
TreeTraverser.using(node -> node::getChildNodes);
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
uow.committed = false;
uow.aborted = true;
});
// log.record(txn::abort)
// cache.invalidateSince(txn::start time)
if (LOG.isInfoEnabled()) {
if (elapsedTime.isRunning()) {
elapsedTime.stop();
}
LOG.info(logTimers("aborted"));
}
}
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
Table<String, String, Either<Object, List<Facet>>> to = this.cache;
from.rowMap()
.forEach(
(rowKey, columnMap) -> {
columnMap.forEach(
(columnKey, value) -> {
if (to.contains(rowKey, columnKey)) {
// TODO(gburd):...
to.put(
rowKey,
columnKey,
Either.left(
CacheUtil.merge(
to.get(rowKey, columnKey).getLeft(),
from.get(rowKey, columnKey).getLeft())));
} else {
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
}
});
});
}
public String describeConflicts() {
return "it's complex...";
}
@Override
public void close() throws E {
// Closing a AbstractUnitOfWork will abort iff we've not already aborted or
// committed this unit of work.
if (aborted == false && committed == false) {
abort();
}
}
public boolean hasAborted() {
return aborted;
}
public boolean hasCommitted() {
return committed;
}
}

View file

@ -79,6 +79,13 @@ public final class Filter<V> {
return new Filter<V>(node, postulate); return new Filter<V>(node, postulate);
} }
public static <V> Filter<V> create(
Getter<V> getter, HelenusPropertyNode node, Postulate<V> postulate) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(postulate, "empty operator");
return new Filter<V>(node, postulate);
}
public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) { public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
Objects.requireNonNull(getter, "empty getter"); Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(op, "empty op"); Objects.requireNonNull(op, "empty op");

View file

@ -81,6 +81,10 @@ public final class Helenus {
return new SessionInitializer(session); return new SessionInitializer(session);
} }
public static SessionInitializer init(Session session, String keyspace) {
return new SessionInitializer(session, keyspace);
}
public static SessionInitializer init(Session session) { public static SessionInitializer init(Session session) {
if (session == null) { if (session == null) {

View file

@ -17,23 +17,19 @@ package net.helenus.core;
import static net.helenus.core.Query.eq; import static net.helenus.core.Query.eq;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*; import com.datastax.driver.core.*;
import com.google.common.collect.Table; import com.google.common.collect.Table;
import java.io.Closeable; import java.io.Closeable;
import java.io.PrintStream; import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.*; import java.util.*;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.function.Function; import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import javax.cache.Cache;
import javax.cache.CacheManager;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.SessionCache;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.operation.*; import net.helenus.core.operation.*;
import net.helenus.core.reflect.Drafted; import net.helenus.core.reflect.Drafted;
@ -47,74 +43,69 @@ import net.helenus.support.*;
import net.helenus.support.Fun.Tuple1; import net.helenus.support.Fun.Tuple1;
import net.helenus.support.Fun.Tuple2; import net.helenus.support.Fun.Tuple2;
import net.helenus.support.Fun.Tuple6; import net.helenus.support.Fun.Tuple6;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HelenusSession extends AbstractSessionOperations implements Closeable { public class HelenusSession extends AbstractSessionOperations implements Closeable {
public static final Object deleted = new Object(); public static final Object deleted = new Object();
private static final Logger LOG = LoggerFactory.getLogger(HelenusSession.class);
private static final Pattern classNameRegex =
Pattern.compile("^(?:\\w+\\.)+(?:(\\w+)|(\\w+)\\$.*)$");
private final Session session; private final Session session;
private final CodecRegistry registry; private final CodecRegistry registry;
private final ConsistencyLevel defaultConsistencyLevel; private final ConsistencyLevel defaultConsistencyLevel;
private final boolean defaultQueryIdempotency; private final boolean defaultQueryIdempotency;
private final MetricRegistry metricRegistry; private final MetricRegistry metricRegistry;
private final Tracer zipkinTracer;
private final PrintStream printStream; private final PrintStream printStream;
private final Class<? extends UnitOfWork> unitOfWorkClass;
private final SessionRepository sessionRepository; private final SessionRepository sessionRepository;
private final Executor executor; private final Executor executor;
private final boolean dropSchemaOnClose; private final boolean dropSchemaOnClose;
private final SessionCache<String, Object> sessionCache; private final CacheManager cacheManager;
private final RowColumnValueProvider valueProvider; private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer; private final StatementColumnValuePreparer valuePreparer;
private final Metadata metadata; private final Metadata metadata;
private volatile String usingKeyspace; private volatile String usingKeyspace;
private volatile boolean showCql; private volatile boolean showCql;
private volatile boolean showValues;
HelenusSession( HelenusSession(
Session session, Session session,
String usingKeyspace, String usingKeyspace,
CodecRegistry registry, CodecRegistry registry,
boolean showCql, boolean showCql,
boolean showValues,
PrintStream printStream, PrintStream printStream,
SessionRepositoryBuilder sessionRepositoryBuilder, SessionRepositoryBuilder sessionRepositoryBuilder,
Executor executor, Executor executor,
boolean dropSchemaOnClose, boolean dropSchemaOnClose,
ConsistencyLevel consistencyLevel, ConsistencyLevel consistencyLevel,
boolean defaultQueryIdempotency, boolean defaultQueryIdempotency,
Class<? extends UnitOfWork> unitOfWorkClass, CacheManager cacheManager,
SessionCache sessionCache, MetricRegistry metricRegistry) {
MetricRegistry metricRegistry,
Tracer tracer) {
this.session = session; this.session = session;
this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry; this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
this.usingKeyspace = this.usingKeyspace =
Objects.requireNonNull( Objects.requireNonNull(
usingKeyspace, "keyspace needs to be selected before creating session"); usingKeyspace, "keyspace needs to be selected before creating session");
this.showCql = showCql; this.showCql = showCql;
this.showValues = showValues;
this.printStream = printStream; this.printStream = printStream;
this.sessionRepository = sessionRepositoryBuilder.build(); this.sessionRepository =
sessionRepositoryBuilder == null ? null : sessionRepositoryBuilder.build();
this.executor = executor; this.executor = executor;
this.dropSchemaOnClose = dropSchemaOnClose; this.dropSchemaOnClose = dropSchemaOnClose;
this.defaultConsistencyLevel = consistencyLevel; this.defaultConsistencyLevel = consistencyLevel;
this.defaultQueryIdempotency = defaultQueryIdempotency; this.defaultQueryIdempotency = defaultQueryIdempotency;
this.unitOfWorkClass = unitOfWorkClass;
this.metricRegistry = metricRegistry; this.metricRegistry = metricRegistry;
this.zipkinTracer = tracer; this.cacheManager = cacheManager;
if (sessionCache == null) {
this.sessionCache = SessionCache.<String, Object>defaultCache();
} else {
this.sessionCache = sessionCache;
}
this.valueProvider = new RowColumnValueProvider(this.sessionRepository); this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository); this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
this.metadata = session.getCluster().getMetadata(); this.metadata = session == null ? null : session.getCluster().getMetadata();
}
public UnitOfWork begin() {
return new UnitOfWork(this).begin();
}
public UnitOfWork begin(UnitOfWork parent) {
return new UnitOfWork(this, parent).begin();
} }
@Override @Override
@ -153,6 +144,20 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
return this; return this;
} }
public HelenusSession showQueryValuesInLog(boolean showValues) {
this.showValues = showValues;
return this;
}
public HelenusSession showQueryValuesInLog() {
this.showValues = true;
return this;
}
public boolean showValues() {
return showValues;
}
@Override @Override
public Executor getExecutor() { public Executor getExecutor() {
return executor; return executor;
@ -173,11 +178,6 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
return valuePreparer; return valuePreparer;
} }
@Override
public Tracer getZipkinTracer() {
return zipkinTracer;
}
@Override @Override
public MetricRegistry getMetricRegistry() { public MetricRegistry getMetricRegistry() {
return metricRegistry; return metricRegistry;
@ -195,25 +195,29 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
@Override @Override
public Object checkCache(String tableName, List<Facet> facets) { public Object checkCache(String tableName, List<Facet> facets) {
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets);
Object result = null; Object result = null;
for (String[] combination : facetCombinations) { if (cacheManager != null) {
String cacheKey = tableName + "." + Arrays.toString(combination); Cache<String, Object> cache = cacheManager.getCache(tableName);
result = sessionCache.get(cacheKey); if (cache != null) {
for (String key : CacheUtil.flatKeys(tableName, facets)) {
result = cache.get(key);
if (result != null) { if (result != null) {
return result; return result;
} }
} }
}
}
return null; return null;
} }
@Override @Override
public void cacheEvict(List<Facet> facets) { public void cacheEvict(List<Facet> facets) {
if (cacheManager != null) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
List<String[]> facetCombinations = CacheUtil.flattenFacets(facets); Cache<String, Object> cache = cacheManager.getCache(tableName);
for (String[] combination : facetCombinations) { if (cache != null) {
String cacheKey = tableName + "." + Arrays.toString(combination); CacheUtil.flatKeys(tableName, facets).forEach(key -> cache.remove(key));
sessionCache.invalidate(cacheKey); }
} }
} }
@ -235,7 +239,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
} }
} else { } else {
value = valueMap.get(prop.getPropertyName()); value = valueMap.get(prop.getPropertyName());
binder.setValueForProperty(prop, value.toString()); if (value != null) binder.setValueForProperty(prop, value.toString());
} }
} }
if (binder.isBound()) { if (binder.isBound()) {
@ -252,6 +256,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
@Override @Override
public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) { public void mergeCache(Table<String, String, Either<Object, List<Facet>>> uowCache) {
if (cacheManager != null) {
List<Object> items = List<Object> items =
uowCache uowCache
.values() .values()
@ -279,8 +284,10 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop); BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
binder.setValueForProperty(prop, value.toString()); binder.setValueForProperty(prop, value.toString());
} else { } else {
binder.setValueForProperty( Object v = valueMap.get(prop.getPropertyName());
prop, valueMap.get(prop.getPropertyName()).toString()); if (v != null) {
binder.setValueForProperty(prop, v.toString());
}
} }
}); });
if (binder.isBound()) { if (binder.isBound()) {
@ -305,91 +312,40 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
.collect(Collectors.toList()); .collect(Collectors.toList());
for (List<Facet> facets : deletedFacetSets) { for (List<Facet> facets : deletedFacetSets) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
List<String[]> combinations = CacheUtil.flattenFacets(facets); Cache<String, Object> cache = cacheManager.getCache(tableName);
for (String[] combination : combinations) { if (cache != null) {
String cacheKey = tableName + "." + Arrays.toString(combination); List<String> keys = CacheUtil.flatKeys(tableName, facets);
sessionCache.invalidate(cacheKey); keys.forEach(key -> cache.remove(key));
}
} }
} }
} }
private void replaceCachedFacetValues( private void replaceCachedFacetValues(
Object pojo, String tableName, List<String[]> facetCombinations) { Object pojo, String tableName, List<String[]> facetCombinations) {
if (cacheManager != null) {
for (String[] combination : facetCombinations) { for (String[] combination : facetCombinations) {
String cacheKey = tableName + "." + Arrays.toString(combination); String cacheKey = tableName + "." + Arrays.toString(combination);
sessionCache.invalidate(cacheKey); Cache<String, Object> cache = cacheManager.getCache(tableName);
sessionCache.put(cacheKey, pojo); if (cache != null) {
if (pojo == null || pojo == HelenusSession.deleted) {
cache.remove(cacheKey);
} else {
cache.put(cacheKey, pojo);
} }
} }
}
}
}
public CacheManager getCacheManager() {
return cacheManager;
}
public Metadata getMetadata() { public Metadata getMetadata() {
return metadata; return metadata;
} }
public UnitOfWork begin() {
return this.begin(null);
}
private String extractClassNameFromStackFrame(String classNameOnStack) {
String name = null;
Matcher m = classNameRegex.matcher(classNameOnStack);
if (m.find()) {
name = (m.group(1) != null) ? m.group(1) : ((m.group(2) != null) ? m.group(2) : name);
} else {
name = classNameOnStack;
}
return name;
}
public synchronized UnitOfWork begin(UnitOfWork parent) {
try {
Class<? extends UnitOfWork> clazz = unitOfWorkClass;
Constructor<? extends UnitOfWork> ctor =
clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
UnitOfWork uow = ctor.newInstance(this, parent);
if (LOG.isInfoEnabled() && uow.getPurpose() == null) {
StringBuilder purpose = null;
int frame = 0;
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
String targetClassName = HelenusSession.class.getSimpleName();
String stackClassName = null;
do {
frame++;
stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
} while (!stackClassName.equals(targetClassName) && frame < trace.length);
do {
frame++;
stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
} while (stackClassName.equals(targetClassName) && frame < trace.length);
if (frame < trace.length) {
purpose =
new StringBuilder()
.append(trace[frame].getClassName())
.append(".")
.append(trace[frame].getMethodName())
.append("(")
.append(trace[frame].getFileName())
.append(":")
.append(trace[frame].getLineNumber())
.append(")");
uow.setPurpose(purpose.toString());
}
}
if (parent != null) {
parent.addNestedUnitOfWork(uow);
}
return uow.begin();
} catch (NoSuchMethodException
| InvocationTargetException
| InstantiationException
| IllegalAccessException e) {
throw new HelenusException(
String.format(
"Unable to instantiate %s as a UnitOfWork.", unitOfWorkClass.getSimpleName()),
e);
}
}
public <E> SelectOperation<E> select(E pojo) { public <E> SelectOperation<E> select(E pojo) {
Objects.requireNonNull( Objects.requireNonNull(
pojo, "supplied object must be a dsl for a registered entity but cannot be null"); pojo, "supplied object must be a dsl for a registered entity but cannot be null");
@ -424,9 +380,17 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
return new SelectOperation<Fun.ArrayTuple>(this); return new SelectOperation<Fun.ArrayTuple>(this);
} }
public SelectOperation<Row> selectAll(Class<?> entityClass) { public <E> SelectOperation<E> selectAll(Class<E> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is empty"); Objects.requireNonNull(entityClass, "entityClass is empty");
return new SelectOperation<Row>(this, Helenus.entity(entityClass)); HelenusEntity entity = Helenus.entity(entityClass);
return new SelectOperation<E>(
this,
entity,
(r) -> {
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
return (E) Helenus.map(entityClass, map);
});
} }
public <E> SelectOperation<Row> selectAll(E pojo) { public <E> SelectOperation<Row> selectAll(E pojo) {
@ -699,23 +663,23 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
} catch (HelenusMappingException e) { } catch (HelenusMappingException e) {
} }
if (entity != null) { if (entity != null) {
return new InsertOperation<T>(this, entity.getMappingInterface(), true); return new InsertOperation<T>(this, entity, entity.getMappingInterface(), true);
} else { } else {
return this.<T>insert(pojo, null); return this.<T>insert(pojo, null, null);
} }
} }
public <T> InsertOperation<T> insert(Drafted draft) { public <T> InsertOperation<T> insert(Drafted draft) {
return insert(draft.build(), draft.mutated()); return insert(draft.build(), draft.mutated(), draft.read());
} }
private <T> InsertOperation<T> insert(T pojo, Set<String> mutations) { private <T> InsertOperation<T> insert(T pojo, Set<String> mutations, Set<String> read) {
Objects.requireNonNull(pojo, "pojo is empty"); Objects.requireNonNull(pojo, "pojo is empty");
Class<?> iface = MappingUtil.getMappingInterface(pojo); Class<?> iface = MappingUtil.getMappingInterface(pojo);
HelenusEntity entity = Helenus.entity(iface); HelenusEntity entity = Helenus.entity(iface);
return new InsertOperation<T>(this, entity, pojo, mutations, true); return new InsertOperation<T>(this, entity, pojo, mutations, read, true);
} }
public InsertOperation<ResultSet> upsert() { public InsertOperation<ResultSet> upsert() {
@ -727,7 +691,7 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
} }
public <T> InsertOperation<T> upsert(Drafted draft) { public <T> InsertOperation<T> upsert(Drafted draft) {
return this.<T>upsert((T) draft.build(), draft.mutated()); return this.<T>upsert((T) draft.build(), draft.mutated(), draft.read());
} }
public <T> InsertOperation<T> upsert(T pojo) { public <T> InsertOperation<T> upsert(T pojo) {
@ -740,19 +704,19 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
} catch (HelenusMappingException e) { } catch (HelenusMappingException e) {
} }
if (entity != null) { if (entity != null) {
return new InsertOperation<T>(this, entity.getMappingInterface(), false); return new InsertOperation<T>(this, entity, entity.getMappingInterface(), false);
} else { } else {
return this.<T>upsert(pojo, null); return this.<T>upsert(pojo, null, null);
} }
} }
private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations) { private <T> InsertOperation<T> upsert(T pojo, Set<String> mutations, Set<String> read) {
Objects.requireNonNull(pojo, "pojo is empty"); Objects.requireNonNull(pojo, "pojo is empty");
Class<?> iface = MappingUtil.getMappingInterface(pojo); Class<?> iface = MappingUtil.getMappingInterface(pojo);
HelenusEntity entity = Helenus.entity(iface); HelenusEntity entity = Helenus.entity(iface);
return new InsertOperation<T>(this, entity, pojo, mutations, false); return new InsertOperation<T>(this, entity, pojo, mutations, read, false);
} }
public DeleteOperation delete() { public DeleteOperation delete() {
@ -773,6 +737,9 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
} }
public void close() { public void close() {
if (session == null) {
return;
}
if (session.isClosed()) { if (session.isClosed()) {
return; return;
@ -803,11 +770,11 @@ public class HelenusSession extends AbstractSessionOperations implements Closeab
switch (entity.getType()) { switch (entity.getType()) {
case TABLE: case TABLE:
execute(SchemaUtil.dropTable(entity), true); execute(SchemaUtil.dropTable(entity));
break; break;
case UDT: case UDT:
execute(SchemaUtil.dropUserType(entity), true); execute(SchemaUtil.dropUserType(entity));
break; break;
default: default:

View file

@ -6,20 +6,43 @@ import java.util.Objects;
public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> { public class PostCommitFunction<T, R> implements java.util.function.Function<T, R> {
private final UnitOfWork uow; private final UnitOfWork uow;
private final List<CommitThunk> postCommit; private final List<CommitThunk> commitThunks;
private final List<CommitThunk> abortThunks;
private boolean committed;
PostCommitFunction(UnitOfWork uow, List<CommitThunk> postCommit) { PostCommitFunction(
UnitOfWork uow,
List<CommitThunk> postCommit,
List<CommitThunk> abortThunks,
boolean committed) {
this.uow = uow; this.uow = uow;
this.postCommit = postCommit; this.commitThunks = postCommit;
this.abortThunks = abortThunks;
this.committed = committed;
} }
public void andThen(CommitThunk after) { public PostCommitFunction<T, R> andThen(CommitThunk after) {
Objects.requireNonNull(after); Objects.requireNonNull(after);
if (postCommit == null) { if (commitThunks == null) {
if (committed) {
after.apply(); after.apply();
} else {
postCommit.add(after);
} }
} else {
commitThunks.add(after);
}
return this;
}
public PostCommitFunction<T, R> orElse(CommitThunk after) {
Objects.requireNonNull(after);
if (abortThunks == null) {
if (!committed) {
after.apply();
}
} else {
abortThunks.add(after);
}
return this;
} }
@Override @Override

View file

@ -165,6 +165,14 @@ public final class SchemaUtil {
} }
} }
if (p.size() == 0 && c.size() == 0)
return "{"
+ properties
.stream()
.map(HelenusProperty::getPropertyName)
.collect(Collectors.joining(", "))
+ "}";
return "(" return "("
+ ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0)) + ((p.size() > 1) ? "(" + String.join(", ", p) + ")" : p.get(0))
+ ((c.size() > 0) + ((c.size() > 0)
@ -329,7 +337,7 @@ public final class SchemaUtil {
public static SchemaStatement createIndex(HelenusProperty prop) { public static SchemaStatement createIndex(HelenusProperty prop) {
if (prop.caseSensitiveIndex()) { if (prop.caseSensitiveIndex()) {
return SchemaBuilder.createIndex(prop.getIndexName().get().toCql()) return SchemaBuilder.createIndex(indexName(prop))
.ifNotExists() .ifNotExists()
.onTable(prop.getEntity().getName().toCql()) .onTable(prop.getEntity().getName().toCql())
.andColumn(prop.getColumnName().toCql()); .andColumn(prop.getColumnName().toCql());
@ -398,7 +406,7 @@ public final class SchemaUtil {
} }
public static SchemaStatement dropIndex(HelenusProperty prop) { public static SchemaStatement dropIndex(HelenusProperty prop) {
return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists(); return SchemaBuilder.dropIndex(indexName(prop)).ifExists();
} }
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) { private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
@ -457,4 +465,9 @@ public final class SchemaUtil {
} }
return null; return null;
} }
private static String indexName(HelenusProperty prop) {
return prop.getEntity().getName().toCql() + "_" + prop.getIndexName().get().toCql();
}
} }

View file

@ -15,7 +15,6 @@
*/ */
package net.helenus.core; package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*; import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
@ -25,7 +24,7 @@ import java.util.*;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.function.Consumer; import java.util.function.Consumer;
import net.helenus.core.cache.SessionCache; import javax.cache.CacheManager;
import net.helenus.core.reflect.DslExportable; import net.helenus.core.reflect.DslExportable;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType; import net.helenus.mapping.HelenusEntityType;
@ -43,19 +42,26 @@ public final class SessionInitializer extends AbstractSessionOperations {
private CodecRegistry registry; private CodecRegistry registry;
private String usingKeyspace; private String usingKeyspace;
private boolean showCql = false; private boolean showCql = false;
private boolean showValues = true;
private ConsistencyLevel consistencyLevel; private ConsistencyLevel consistencyLevel;
private boolean idempotent = true; private boolean idempotent = false;
private MetricRegistry metricRegistry = new MetricRegistry(); private MetricRegistry metricRegistry = new MetricRegistry();
private Tracer zipkinTracer;
private PrintStream printStream = System.out; private PrintStream printStream = System.out;
private Executor executor = MoreExecutors.directExecutor(); private Executor executor = MoreExecutors.directExecutor();
private Class<? extends UnitOfWork> unitOfWorkClass = UnitOfWorkImpl.class;
private SessionRepositoryBuilder sessionRepository; private SessionRepositoryBuilder sessionRepository;
private boolean dropUnusedColumns = false; private boolean dropUnusedColumns = false;
private boolean dropUnusedIndexes = false; private boolean dropUnusedIndexes = false;
private KeyspaceMetadata keyspaceMetadata; private KeyspaceMetadata keyspaceMetadata;
private AutoDdl autoDdl = AutoDdl.UPDATE; private AutoDdl autoDdl = AutoDdl.UPDATE;
private SessionCache sessionCache = null; private CacheManager cacheManager = null;
SessionInitializer(Session session, String keyspace) {
this.session = session;
this.usingKeyspace = keyspace;
if (session != null) {
this.sessionRepository = new SessionRepositoryBuilder(session);
}
}
SessionInitializer(Session session) { SessionInitializer(Session session) {
this.session = Objects.requireNonNull(session, "empty session"); this.session = Objects.requireNonNull(session, "empty session");
@ -103,28 +109,32 @@ public final class SessionInitializer extends AbstractSessionOperations {
return this; return this;
} }
public SessionInitializer showQueryValuesInLog(boolean showValues) {
this.showValues = showValues;
return this;
}
public SessionInitializer showQueryValuesInLog() {
this.showValues = true;
return this;
}
public boolean showValues() {
return showValues;
}
public SessionInitializer metricRegistry(MetricRegistry metricRegistry) { public SessionInitializer metricRegistry(MetricRegistry metricRegistry) {
this.metricRegistry = metricRegistry; this.metricRegistry = metricRegistry;
return this; return this;
} }
public SessionInitializer zipkinTracer(Tracer tracer) {
this.zipkinTracer = tracer;
return this;
}
public SessionInitializer setUnitOfWorkClass(Class<? extends UnitOfWork> e) {
this.unitOfWorkClass = e;
return this;
}
public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) { public SessionInitializer consistencyLevel(ConsistencyLevel consistencyLevel) {
this.consistencyLevel = consistencyLevel; this.consistencyLevel = consistencyLevel;
return this; return this;
} }
public SessionInitializer setSessionCache(SessionCache sessionCache) { public SessionInitializer setCacheManager(CacheManager cacheManager) {
this.sessionCache = sessionCache; this.cacheManager = cacheManager;
return this; return this;
} }
@ -132,6 +142,11 @@ public final class SessionInitializer extends AbstractSessionOperations {
return consistencyLevel; return consistencyLevel;
} }
public SessionInitializer setOperationsIdempotentByDefault() {
this.idempotent = true;
return this;
}
public SessionInitializer idempotentQueryExecution(boolean idempotent) { public SessionInitializer idempotentQueryExecution(boolean idempotent) {
this.idempotent = idempotent; this.idempotent = idempotent;
return this; return this;
@ -233,8 +248,10 @@ public final class SessionInitializer extends AbstractSessionOperations {
} }
public SessionInitializer use(String keyspace) { public SessionInitializer use(String keyspace) {
if (session != null) {
session.execute(SchemaUtil.use(keyspace, false)); session.execute(SchemaUtil.use(keyspace, false));
this.usingKeyspace = keyspace; this.usingKeyspace = keyspace;
}
return this; return this;
} }
@ -255,16 +272,15 @@ public final class SessionInitializer extends AbstractSessionOperations {
usingKeyspace, usingKeyspace,
registry, registry,
showCql, showCql,
showValues,
printStream, printStream,
sessionRepository, sessionRepository,
executor, executor,
autoDdl == AutoDdl.CREATE_DROP, autoDdl == AutoDdl.CREATE_DROP,
consistencyLevel, consistencyLevel,
idempotent, idempotent,
unitOfWorkClass, cacheManager,
sessionCache, metricRegistry);
metricRegistry,
zipkinTracer);
} }
private void initialize() { private void initialize() {
@ -281,10 +297,16 @@ public final class SessionInitializer extends AbstractSessionOperations {
} }
DslExportable dsl = (DslExportable) Helenus.dsl(iface); DslExportable dsl = (DslExportable) Helenus.dsl(iface);
if (session != null) {
dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata()); dsl.setCassandraMetadataForHelenusSession(session.getCluster().getMetadata());
}
if (sessionRepository != null) {
sessionRepository.add(dsl); sessionRepository.add(dsl);
}
}); });
if (session == null) return;
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes); TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns); UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);

View file

@ -35,12 +35,12 @@ public final class TableOperations {
} }
public void createTable(HelenusEntity entity) { public void createTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createTable(entity), true); sessionOps.execute(SchemaUtil.createTable(entity));
executeBatch(SchemaUtil.createIndexes(entity)); executeBatch(SchemaUtil.createIndexes(entity));
} }
public void dropTable(HelenusEntity entity) { public void dropTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropTable(entity), true); sessionOps.execute(SchemaUtil.dropTable(entity));
} }
public void validateTable(TableMetadata tmd, HelenusEntity entity) { public void validateTable(TableMetadata tmd, HelenusEntity entity) {
@ -79,17 +79,14 @@ public final class TableOperations {
public void createView(HelenusEntity entity) { public void createView(HelenusEntity entity) {
sessionOps.execute( sessionOps.execute(
SchemaUtil.createMaterializedView( SchemaUtil.createMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity), sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
true); // executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10 does not yet support 2i on materialized views.
// executeBatch(SchemaUtil.createIndexes(entity)); NOTE: Unfortunately C* 3.10
// does not yet support 2i on materialized views.
} }
public void dropView(HelenusEntity entity) { public void dropView(HelenusEntity entity) {
sessionOps.execute( sessionOps.execute(
SchemaUtil.dropMaterializedView( SchemaUtil.dropMaterializedView(
sessionOps.usingKeyspace(), entity.getName().toCql(), entity), sessionOps.usingKeyspace(), entity.getName().toCql(), entity));
true);
} }
public void updateView(TableMetadata tmd, HelenusEntity entity) { public void updateView(TableMetadata tmd, HelenusEntity entity) {
@ -104,9 +101,6 @@ public final class TableOperations {
private void executeBatch(List<SchemaStatement> list) { private void executeBatch(List<SchemaStatement> list) {
list.forEach( list.forEach(s -> sessionOps.execute(s));
s -> {
sessionOps.execute(s, true);
});
} }
} }

View file

@ -15,12 +15,164 @@
*/ */
package net.helenus.core; package net.helenus.core;
import com.google.common.base.Stopwatch; import static net.helenus.core.HelenusSession.deleted;
import java.util.List;
import java.util.Optional;
import net.helenus.core.cache.Facet;
public interface UnitOfWork<X extends Exception> extends AutoCloseable { import com.google.common.base.Stopwatch;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.common.collect.TreeTraverser;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.integration.CacheLoader;
import javax.cache.integration.CacheLoaderException;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet;
import net.helenus.core.cache.MapCache;
import net.helenus.core.operation.AbstractOperation;
import net.helenus.core.operation.BatchOperation;
import net.helenus.mapping.MappingUtil;
import net.helenus.support.Either;
import net.helenus.support.HelenusException;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Encapsulates the concept of a "transaction" as a unit-of-work. */
public class UnitOfWork implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(UnitOfWork.class);
private static final Pattern classNameRegex =
Pattern.compile("^(?:\\w+\\.)+(?:(\\w+)|(\\w+)\\$.*)$");
private final List<UnitOfWork> nested = new ArrayList<>();
private final HelenusSession session;
public final UnitOfWork parent;
private final Table<String, String, Either<Object, List<Facet>>> cache = HashBasedTable.create();
private final MapCache<String, Object> statementCache;
protected String purpose;
protected List<String> nestedPurposes = new ArrayList<String>();
protected String info;
protected int cacheHits = 0;
protected int cacheMisses = 0;
protected int databaseLookups = 0;
protected final Stopwatch elapsedTime;
protected Map<String, Double> databaseTime = new HashMap<>();
protected double cacheLookupTimeMSecs = 0.0;
private List<CommitThunk> commitThunks = new ArrayList<CommitThunk>();
private List<CommitThunk> abortThunks = new ArrayList<CommitThunk>();
private List<CompletableFuture<?>> asyncOperationFutures = new ArrayList<CompletableFuture<?>>();
private boolean aborted = false;
private boolean committed = false;
private long committedAt = 0L;
private BatchOperation batch;
private String extractClassNameFromStackFrame(String classNameOnStack) {
String name = null;
Matcher m = classNameRegex.matcher(classNameOnStack);
if (m.find()) {
name = (m.group(1) != null) ? m.group(1) : ((m.group(2) != null) ? m.group(2) : name);
} else {
name = classNameOnStack;
}
return name;
}
public UnitOfWork(HelenusSession session) {
this(session, null);
}
public UnitOfWork(HelenusSession session, UnitOfWork parent) {
Objects.requireNonNull(session, "containing session cannot be null");
this.parent = parent;
if (parent != null) {
parent.addNestedUnitOfWork(this);
}
this.session = session;
CacheLoader cacheLoader = null;
if (parent != null) {
cacheLoader =
new CacheLoader<String, Object>() {
Cache<String, Object> cache = parent.getCache();
@Override
public Object load(String key) throws CacheLoaderException {
return cache.get(key);
}
@Override
public Map<String, Object> loadAll(Iterable<? extends String> keys)
throws CacheLoaderException {
Map<String, Object> kvp = new HashMap<String, Object>();
for (String key : keys) {
kvp.put(key, cache.get(key));
}
return kvp;
}
};
}
this.elapsedTime = Stopwatch.createUnstarted();
this.statementCache =
new MapCache<String, Object>(null, "UOW(" + hashCode() + ")", cacheLoader, true);
if (LOG.isInfoEnabled()) {
StringBuilder purpose = null;
int frame = 0;
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
String targetClassName = HelenusSession.class.getSimpleName();
String stackClassName = null;
do {
frame++;
stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
} while (!stackClassName.equals(targetClassName) && frame < trace.length);
do {
frame++;
stackClassName = extractClassNameFromStackFrame(trace[frame].getClassName());
} while (stackClassName.equals(targetClassName) && frame < trace.length);
if (frame < trace.length) {
purpose =
new StringBuilder()
.append(trace[frame].getClassName())
.append(".")
.append(trace[frame].getMethodName())
.append("(")
.append(trace[frame].getFileName())
.append(":")
.append(trace[frame].getLineNumber())
.append(")");
this.purpose = purpose.toString();
}
}
}
public void addDatabaseTime(String name, Stopwatch amount) {
Double time = databaseTime.get(name);
if (time == null) {
databaseTime.put(name, (double) amount.elapsed(TimeUnit.MICROSECONDS));
} else {
databaseTime.put(name, time + amount.elapsed(TimeUnit.MICROSECONDS));
}
}
public void addCacheLookupTime(Stopwatch amount) {
cacheLookupTimeMSecs += amount.elapsed(TimeUnit.MICROSECONDS);
}
public void addNestedUnitOfWork(UnitOfWork uow) {
synchronized (nested) {
nested.add(uow);
}
}
/** /**
* Marks the beginning of a transactional section of work. Will write a * Marks the beginning of a transactional section of work. Will write a
@ -28,44 +180,437 @@ public interface UnitOfWork<X extends Exception> extends AutoCloseable {
* *
* @return the handle used to commit or abort the work. * @return the handle used to commit or abort the work.
*/ */
UnitOfWork<X> begin(); public synchronized UnitOfWork begin() {
elapsedTime.start();
// log.record(txn::start)
return this;
}
void addNestedUnitOfWork(UnitOfWork<X> uow); public String getPurpose() {
return purpose;
}
public UnitOfWork setPurpose(String purpose) {
this.purpose = purpose;
return this;
}
public void addFuture(CompletableFuture<?> future) {
asyncOperationFutures.add(future);
}
public void setInfo(String info) {
this.info = info;
}
public void recordCacheAndDatabaseOperationCount(int cache, int ops) {
if (cache > 0) {
cacheHits += cache;
} else {
cacheMisses += Math.abs(cache);
}
if (ops > 0) {
databaseLookups += ops;
}
}
public String logTimers(String what) {
double e = (double) elapsedTime.elapsed(TimeUnit.MICROSECONDS) / 1000.0;
double d = 0.0;
double c = cacheLookupTimeMSecs / 1000.0;
double fc = (c / e) * 100.0;
String database = "";
if (databaseTime.size() > 0) {
List<String> dbt = new ArrayList<>(databaseTime.size());
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
double t = dt.getValue() / 1000.0;
d += t;
dbt.add(String.format("%s took %,.3fms %,2.2f%%", dt.getKey(), t, (t / e) * 100.0));
}
double fd = (d / e) * 100.0;
database =
String.format(
", %d quer%s (%,.3fms %,2.2f%% - %s)",
databaseLookups, (databaseLookups > 1) ? "ies" : "y", d, fd, String.join(", ", dbt));
}
String cache = "";
if (cacheLookupTimeMSecs > 0) {
int cacheLookups = cacheHits + cacheMisses;
cache =
String.format(
" with %d cache lookup%s (%,.3fms %,2.2f%% - %,d hit, %,d miss)",
cacheLookups, cacheLookups > 1 ? "s" : "", c, fc, cacheHits, cacheMisses);
}
String da = "";
if (databaseTime.size() > 0 || cacheLookupTimeMSecs > 0) {
double dat = d + c;
double daf = (dat / e) * 100;
da =
String.format(
" consuming %,.3fms for data access, or %,2.2f%% of total UOW time.", dat, daf);
}
String x = nestedPurposes.stream().distinct().collect(Collectors.joining(", "));
String n =
nested
.stream()
.map(uow -> String.valueOf(uow.hashCode()))
.collect(Collectors.joining(", "));
String s =
String.format(
Locale.US,
"UOW(%s%s) %s in %,.3fms%s%s%s%s%s%s",
hashCode(),
(nested.size() > 0 ? ", [" + n + "]" : ""),
what,
e,
cache,
database,
da,
(purpose == null ? "" : " " + purpose),
(nestedPurposes.isEmpty()) ? "" : ", " + x,
(info == null) ? "" : " " + info);
return s;
}
private void applyPostCommitFunctions(String what, List<CommitThunk> thunks) {
if (!thunks.isEmpty()) {
for (CommitThunk f : thunks) {
f.apply();
}
}
}
public Optional<Object> cacheLookup(List<Facet> facets) {
String tableName = CacheUtil.schemaName(facets);
Optional<Object> result = Optional.empty();
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnName = facet.name() + "==" + facet.value();
Either<Object, List<Facet>> eitherValue = cache.get(tableName, columnName);
if (eitherValue != null) {
Object value = deleted;
if (eitherValue.isLeft()) {
value = eitherValue.getLeft();
}
return Optional.of(value);
}
}
}
// Be sure to check all enclosing UnitOfWork caches as well, we may be nested.
result = checkParentCache(facets);
if (result.isPresent()) {
Object r = result.get();
Class<?> iface = MappingUtil.getMappingInterface(r);
if (Helenus.entity(iface).isDraftable()) {
cacheUpdate(r, facets);
} else {
cacheUpdate(SerializationUtils.<Serializable>clone((Serializable) r), facets);
}
}
return result;
}
private Optional<Object> checkParentCache(List<Facet> facets) {
Optional<Object> result = Optional.empty();
if (parent != null) {
result = parent.checkParentCache(facets);
}
return result;
}
public List<Facet> cacheEvict(List<Facet> facets) {
Either<Object, List<Facet>> deletedObjectFacets = Either.right(facets);
String tableName = CacheUtil.schemaName(facets);
Optional<Object> optionalValue = cacheLookup(facets);
for (Facet facet : facets) {
if (!facet.fixed()) {
String columnKey = facet.name() + "==" + facet.value();
// mark the value identified by the facet to `deleted`
cache.put(tableName, columnKey, deletedObjectFacets);
}
}
// Now, look for other row/col pairs that referenced the same object, mark them
// `deleted` if the cache had a value before we added the deleted marker objects.
if (optionalValue.isPresent()) {
Object value = optionalValue.get();
cache
.columnKeySet()
.forEach(
columnKey -> {
Either<Object, List<Facet>> eitherCachedValue = cache.get(tableName, columnKey);
if (eitherCachedValue.isLeft()) {
Object cachedValue = eitherCachedValue.getLeft();
if (cachedValue == value) {
cache.put(tableName, columnKey, deletedObjectFacets);
String[] parts = columnKey.split("==");
facets.add(new Facet<String>(parts[0], parts[1]));
}
}
});
}
return facets;
}
public Cache<String, Object> getCache() {
return statementCache;
}
public Object cacheUpdate(Object value, List<Facet> facets) {
Object result = null;
String tableName = CacheUtil.schemaName(facets);
for (Facet facet : facets) {
if (!facet.fixed()) {
if (facet.alone()) {
String columnName = facet.name() + "==" + facet.value();
if (result == null) result = cache.get(tableName, columnName);
cache.put(tableName, columnName, Either.left(value));
}
}
}
return result;
}
public void batch(AbstractOperation s) {
if (batch == null) {
batch = new BatchOperation(session);
}
batch.add(s);
}
private Iterator<UnitOfWork> getChildNodes() {
return nested.iterator();
}
/** /**
* Checks to see if the work performed between calling begin and now can be committed or not. * Checks to see if the work performed between calling begin and now can be committed or not.
* *
* @return a function from which to chain work that only happens when commit is successful * @return a function from which to chain work that only happens when commit is successful
* @throws X when the work overlaps with other concurrent writers. * @throws HelenusException when the work overlaps with other concurrent writers.
*/ */
PostCommitFunction<Void, Void> commit() throws X; public synchronized PostCommitFunction<Void, Void> commit()
throws HelenusException, TimeoutException {
if (isDone()) {
return new PostCommitFunction(this, null, null, false);
}
// Only the outer-most UOW batches statements for commit time, execute them.
if (batch != null) {
committedAt = batch.sync(this); //TODO(gburd): update cache with writeTime...
}
// All nested UnitOfWork should be committed (not aborted) before calls to
// commit, check.
boolean canCommit = true;
TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
for (UnitOfWork uow : traverser.postOrderTraversal(this)) {
if (this != uow) {
canCommit &= (!uow.aborted && uow.committed);
}
}
if (!canCommit) {
if (parent == null) {
// Apply all post-commit abort functions, this is the outer-most UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("aborted", abortThunks);
});
elapsedTime.stop();
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("aborted"));
}
}
return new PostCommitFunction(this, null, null, false);
} else {
committed = true;
aborted = false;
if (parent == null) {
// Apply all post-commit commit functions, this is the outer-most UnitOfWork.
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("committed", uow.commitThunks);
});
// Merge our statement cache into the session cache if it exists.
CacheManager cacheManager = session.getCacheManager();
if (cacheManager != null) {
for (Map.Entry<String, Object> entry :
(Set<Map.Entry<String, Object>>) statementCache.<Map>unwrap(Map.class).entrySet()) {
String[] keyParts = entry.getKey().split("\\.");
if (keyParts.length == 2) {
String cacheName = keyParts[0];
String key = keyParts[1];
if (!StringUtils.isBlank(cacheName) && !StringUtils.isBlank(key)) {
Cache<Object, Object> cache = cacheManager.getCache(cacheName);
if (cache != null) {
Object value = entry.getValue();
if (value == deleted) {
cache.remove(key);
} else {
cache.put(key.toString(), value);
}
}
}
}
}
}
// Merge our cache into the session cache.
session.mergeCache(cache);
// Spoil any lingering futures that may be out there.
asyncOperationFutures.forEach(
f ->
f.completeExceptionally(
new HelenusException(
"Futures must be resolved before their unit of work has committed/aborted.")));
elapsedTime.stop();
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("committed"));
}
return new PostCommitFunction(this, null, null, true);
} else {
// Merge cache and statistics into parent if there is one.
parent.statementCache.putAll(statementCache.<Map>unwrap(Map.class));
parent.mergeCache(cache);
parent.addBatched(batch);
if (purpose != null) {
parent.nestedPurposes.add(purpose);
}
parent.cacheHits += cacheHits;
parent.cacheMisses += cacheMisses;
parent.databaseLookups += databaseLookups;
parent.cacheLookupTimeMSecs += cacheLookupTimeMSecs;
for (Map.Entry<String, Double> dt : databaseTime.entrySet()) {
String name = dt.getKey();
if (parent.databaseTime.containsKey(name)) {
double t = parent.databaseTime.get(name);
parent.databaseTime.put(name, t + dt.getValue());
} else {
parent.databaseTime.put(name, dt.getValue());
}
}
}
}
// TODO(gburd): hopefully we'll be able to detect conflicts here and so we'd want to...
// else {
// Constructor<T> ctor = clazz.getConstructor(conflictExceptionClass);
// T object = ctor.newInstance(new Object[] { String message });
// }
return new PostCommitFunction(this, commitThunks, abortThunks, true);
}
/**
 * Folds a nested unit of work's batched statements into this unit of work's
 * batch, adopting the child's batch wholesale when we have none of our own.
 *
 * @param batch the child's batch; may be null when the child batched nothing
 */
private void addBatched(BatchOperation batch) {
  if (batch == null) {
    return;
  }
  if (this.batch == null) {
    this.batch = batch;
  } else {
    this.batch.addAll(batch);
  }
}
/** /**
* Explicitly abort the work within this unit of work. Any nested aborted unit of work will * Explicitly abort the work within this unit of work. Any nested aborted unit of work will
* trigger the entire unit of work to commit. * trigger the entire unit of work to commit.
*/ */
void abort(); public synchronized void abort() {
if (!aborted) {
aborted = true;
boolean hasAborted(); // Spoil any pending futures created within the context of this unit of work.
asyncOperationFutures.forEach(
f ->
f.completeExceptionally(
new HelenusException(
"Futures must be resolved before their unit of work has committed/aborted.")));
boolean hasCommitted(); TreeTraverser<UnitOfWork> traverser = TreeTraverser.using(node -> node::getChildNodes);
traverser
.postOrderTraversal(this)
.forEach(
uow -> {
applyPostCommitFunctions("aborted", uow.abortThunks);
uow.abortThunks.clear();
});
Optional<Object> cacheLookup(List<Facet> facets); if (parent == null) {
elapsedTime.stop();
void cacheUpdate(Object pojo, List<Facet> facets); if (LOG.isInfoEnabled()) {
LOG.info(logTimers("aborted"));
List<Facet> cacheEvict(List<Facet> facets); }
}
String getPurpose();
// TODO(gburd): when we integrate the transaction support we'll need to...
UnitOfWork setPurpose(String purpose); // log.record(txn::abort)
// cache.invalidateSince(txn::start time)
void setInfo(String info); }
}
void addDatabaseTime(String name, Stopwatch amount);
private void mergeCache(Table<String, String, Either<Object, List<Facet>>> from) {
void addCacheLookupTime(Stopwatch amount); Table<String, String, Either<Object, List<Facet>>> to = this.cache;
from.rowMap()
// Cache > 0 means "cache hit", < 0 means cache miss. .forEach(
void recordCacheAndDatabaseOperationCount(int cache, int database); (rowKey, columnMap) -> {
columnMap.forEach(
(columnKey, value) -> {
if (to.contains(rowKey, columnKey)) {
to.put(
rowKey,
columnKey,
Either.left(
CacheUtil.merge(
to.get(rowKey, columnKey).getLeft(),
from.get(rowKey, columnKey).getLeft())));
} else {
to.put(rowKey, columnKey, from.get(rowKey, columnKey));
}
});
});
}
/**
 * Returns true once this unit of work has finished, i.e. it has either been
 * aborted or committed.
 */
public boolean isDone() {
return aborted || committed;
}
/**
 * Describes conflicting work detected for this unit of work.
 * NOTE(review): conflict detection is not implemented yet, so this currently
 * returns a fixed placeholder string.
 */
public String describeConflicts() {
return "it's complex...";
}
@Override
public void close() throws HelenusException {
  // Closing a UnitOfWork aborts it, but only when it has not already been
  // committed or aborted explicitly.
  if (!aborted && !committed) {
    abort();
  }
}
/** Returns true when this unit of work has been aborted (explicitly or via close()). */
public boolean hasAborted() {
return aborted;
}
/** Returns true when this unit of work has been successfully committed. */
public boolean hasCommitted() {
return committed;
}
/**
 * Returns the timestamp recorded when this unit of work's batch was executed
 * during commit(). NOTE(review): units and the value when no batch ran depend
 * on BatchOperation.sync(...) and the field's declaration, which are outside
 * this view — confirm before relying on them.
 */
public long committedAt() {
return committedAt;
}
} }

View file

@ -33,12 +33,12 @@ public final class UserTypeOperations {
public void createUserType(HelenusEntity entity) { public void createUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createUserType(entity), true); sessionOps.execute(SchemaUtil.createUserType(entity));
} }
public void dropUserType(HelenusEntity entity) { public void dropUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropUserType(entity), true); sessionOps.execute(SchemaUtil.dropUserType(entity));
} }
public void validateUserType(UserType userType, HelenusEntity entity) { public void validateUserType(UserType userType, HelenusEntity entity) {
@ -71,9 +71,6 @@ public final class UserTypeOperations {
private void executeBatch(List<SchemaStatement> list) { private void executeBatch(List<SchemaStatement> list) {
list.forEach( list.forEach(s -> sessionOps.execute(s));
s -> {
sessionOps.execute(s, true);
});
} }
} }

View file

@ -17,6 +17,7 @@ package net.helenus.core.cache;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
@ -29,6 +30,10 @@ public class BoundFacet extends Facet<String> {
this.properties.put(property, value); this.properties.put(property, value);
} }
public Set<HelenusProperty> getProperties() {
return properties.keySet();
}
public BoundFacet(String name, Map<HelenusProperty, Object> properties) { public BoundFacet(String name, Map<HelenusProperty, Object> properties) {
super( super(
name, name,

View file

@ -1,8 +1,17 @@
package net.helenus.core.cache; package net.helenus.core.cache;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider;
public class CacheUtil { public class CacheUtil {
@ -29,6 +38,16 @@ public class CacheUtil {
} }
} }
public static List<String> flatKeys(String table, List<Facet> facets) {
return flattenFacets(facets)
.stream()
.map(
combination -> {
return table + "." + Arrays.toString(combination);
})
.collect(Collectors.toList());
}
public static List<String[]> flattenFacets(List<Facet> facets) { public static List<String[]> flattenFacets(List<Facet> facets) {
List<String[]> combinations = List<String[]> combinations =
CacheUtil.combinations( CacheUtil.combinations(
@ -41,26 +60,137 @@ public class CacheUtil {
return facet.name() + "==" + facet.value(); return facet.name() + "==" + facet.value();
}) })
.collect(Collectors.toList())); .collect(Collectors.toList()));
// TODO(gburd): rework so as to not generate the combinations at all rather than filter
facets =
facets
.stream()
.filter(f -> !f.fixed())
.filter(f -> !f.alone() || !f.combined())
.collect(Collectors.toList());
for (Facet facet : facets) {
combinations =
combinations
.stream()
.filter(
combo -> {
// When used alone, this facet is not distinct so don't use it as a key.
if (combo.length == 1) {
if (!facet.alone() && combo[0].startsWith(facet.name() + "==")) {
return false;
}
} else {
if (!facet.combined()) {
for (String c : combo) {
// Don't use this facet in combination with others to create keys.
if (c.startsWith(facet.name() + "==")) {
return false;
}
}
}
}
return true;
})
.collect(Collectors.toList());
}
return combinations; return combinations;
} }
public static Object merge(Object to, Object from) { /** Merge changed values in the map behind `from` into `to`. */
if (to == from) { public static Object merge(Object t, Object f) {
return to; HelenusEntity entity = Helenus.resolve(MappingUtil.getMappingInterface(t));
} else {
return from; if (t == f) return t;
if (f == null) return t;
if (t == null) return f;
if (t instanceof MapExportable
&& t instanceof Entity
&& f instanceof MapExportable
&& f instanceof Entity) {
Entity to = (Entity) t;
Entity from = (Entity) f;
Map<String, Object> toValueMap = ((MapExportable) to).toMap();
Map<String, Object> fromValueMap = ((MapExportable) from).toMap();
for (HelenusProperty prop : entity.getOrderedProperties()) {
switch (prop.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
continue;
default:
Object toVal = BeanColumnValueProvider.INSTANCE.getColumnValue(to, -1, prop, false);
Object fromVal = BeanColumnValueProvider.INSTANCE.getColumnValue(from, -1, prop, false);
String ttlKey = ttlKey(prop);
String writeTimeKey = writeTimeKey(prop);
int[] toTtlI = (int[]) toValueMap.get(ttlKey);
int toTtl = (toTtlI != null) ? toTtlI[0] : 0;
Long toWriteTime = (Long) toValueMap.get(writeTimeKey);
int[] fromTtlI = (int[]) fromValueMap.get(ttlKey);
int fromTtl = (fromTtlI != null) ? fromTtlI[0] : 0;
Long fromWriteTime = (Long) fromValueMap.get(writeTimeKey);
if (toVal != null) {
if (fromVal != null) {
if (toVal == fromVal) {
// Case: object identity
// Goal: ensure write time and ttl are also in sync
if (fromWriteTime != null
&& fromWriteTime != 0L
&& (toWriteTime == null || fromWriteTime > toWriteTime)) {
((MapExportable) to).put(writeTimeKey, fromWriteTime);
} }
/* if (fromTtl > 0 && fromTtl > toTtl) {
* // TODO(gburd): take ttl and writeTime into account when merging. Map<String, ((MapExportable) to).put(ttlKey, fromTtl);
* Object> toValueMap = to instanceof MapExportable ? ((MapExportable) }
* to).toMap() : null; Map<String, Object> fromValueMap = to instanceof } else if (fromWriteTime != null && fromWriteTime != 0L) {
* MapExportable ? ((MapExportable) from).toMap() : null; // Case: to exists and from exists
* // Goal: copy over from -> to iff from.writeTime > to.writeTime
* if (toValueMap != null && fromValueMap != null) { for (String key : if (toWriteTime != null && toWriteTime != 0L) {
* fromValueMap.keySet()) { if (toValueMap.containsKey(key) && if (fromWriteTime > toWriteTime) {
* toValueMap.get(key) != fromValueMap.get(key)) { toValueMap.put(key, ((MapExportable) to).put(prop.getPropertyName(), fromVal);
* fromValueMap.get(key)); } } } return to; ((MapExportable) to).put(writeTimeKey, fromWriteTime);
*/ if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
} else {
((MapExportable) to).put(prop.getPropertyName(), fromVal);
((MapExportable) to).put(writeTimeKey, fromWriteTime);
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
} else {
if (toWriteTime == null || toWriteTime == 0L) {
// Caution, entering grey area...
if (!toVal.equals(fromVal)) {
// dangerous waters here, values diverge without information that enables resolution,
// policy (for now) is to move value from -> to anyway.
((MapExportable) to).put(prop.getPropertyName(), fromVal);
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
}
}
}
} else {
// Case: from exists, but to doesn't (it's null)
// Goal: copy over from -> to, include ttl and writeTime if present
if (fromVal != null) {
((MapExportable) to).put(prop.getPropertyName(), fromVal);
if (fromWriteTime != null && fromWriteTime != 0L) {
((MapExportable) to).put(writeTimeKey, fromWriteTime);
}
if (fromTtl > 0) {
((MapExportable) to).put(ttlKey, fromTtl);
}
}
}
}
}
return to;
}
return t;
} }
public static String schemaName(List<Facet> facets) { public static String schemaName(List<Facet> facets) {
@ -70,4 +200,22 @@ public class CacheUtil {
.map(facet -> facet.value().toString()) .map(facet -> facet.value().toString())
.collect(Collectors.joining(".")); .collect(Collectors.joining("."));
} }
public static String writeTimeKey(HelenusProperty prop) {
return writeTimeKey(prop.getColumnName().toCql(false));
}
public static String ttlKey(HelenusProperty prop) {
return ttlKey(prop.getColumnName().toCql(false));
}
public static String writeTimeKey(String columnName) {
String key = "_" + columnName + "_writeTime";
return key.toLowerCase();
}
public static String ttlKey(String columnName) {
String key = "_" + columnName + "_ttl";
return key.toLowerCase();
}
} }

View file

@ -21,6 +21,8 @@ public class Facet<T> {
private final String name; private final String name;
private T value; private T value;
private boolean fixed = false; private boolean fixed = false;
private boolean alone = true;
private boolean combined = true;
public Facet(String name) { public Facet(String name) {
this.name = name; this.name = name;
@ -47,4 +49,20 @@ public class Facet<T> {
public boolean fixed() { public boolean fixed() {
return fixed; return fixed;
} }
public void setUniquelyIdentifyingWhenAlone(boolean alone) {
this.alone = alone;
}
public void setUniquelyIdentifyingWhenCombined(boolean combined) {
this.combined = combined;
}
public boolean alone() {
return alone;
}
public boolean combined() {
return combined;
}
} }

View file

@ -0,0 +1,457 @@
package net.helenus.core.cache;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.configuration.CacheEntryListenerConfiguration;
import javax.cache.configuration.Configuration;
import javax.cache.event.CacheEntryRemovedListener;
import javax.cache.integration.CacheLoader;
import javax.cache.integration.CompletionListener;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.EntryProcessorResult;
import javax.cache.processor.MutableEntry;
public class MapCache<K, V> implements Cache<K, V> {
private final CacheManager manager;
private final String name;
private Map<K, V> map = new ConcurrentHashMap<K, V>();
private Set<CacheEntryRemovedListener> cacheEntryRemovedListeners = new HashSet<>();
private CacheLoader<K, V> cacheLoader = null;
private boolean isReadThrough = false;
private Configuration<K, V> configuration = new MapConfiguration<K, V>();
private static class MapConfiguration<K, V> implements Configuration<K, V> {
@Override
public Class<K> getKeyType() {
return null;
}
@Override
public Class<V> getValueType() {
return null;
}
@Override
public boolean isStoreByValue() {
return false;
}
}
public MapCache(
CacheManager manager, String name, CacheLoader<K, V> cacheLoader, boolean isReadThrough) {
this.manager = manager;
this.name = name;
this.cacheLoader = cacheLoader;
this.isReadThrough = isReadThrough;
}
/** {@inheritDoc} */
@Override
public V get(K key) {
V value = null;
synchronized (map) {
value = map.get(key);
if (value == null && isReadThrough && cacheLoader != null) {
V loadedValue = cacheLoader.load(key);
if (loadedValue != null) {
map.put(key, loadedValue);
value = loadedValue;
}
}
}
return value;
}
/** {@inheritDoc} */
@Override
public Map<K, V> getAll(Set<? extends K> keys) {
Map<K, V> result = null;
synchronized (map) {
result = new HashMap<K, V>(keys.size());
for (K key : keys) {
V value = map.get(key);
if (value != null) {
result.put(key, value);
keys.remove(key);
}
}
if (isReadThrough && cacheLoader != null) {
for (K key : keys) {
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
V v = entry.getValue();
if (v != null) {
K k = entry.getKey();
map.put(k, v);
result.put(k, v);
}
}
}
}
}
return result;
}
/** {@inheritDoc} */
@Override
public boolean containsKey(K key) {
return map.containsKey(key);
}
/** {@inheritDoc} */
@Override
public void loadAll(
Set<? extends K> keys, boolean replaceExistingValues, CompletionListener completionListener) {
if (cacheLoader != null) {
try {
synchronized (map) {
Map<K, V> loadedValues = cacheLoader.loadAll(keys);
for (Map.Entry<K, V> entry : loadedValues.entrySet()) {
V value = entry.getValue();
K key = entry.getKey();
if (value != null) {
boolean existsCurrently = map.containsKey(key);
if (!existsCurrently || replaceExistingValues) {
map.put(key, value);
keys.remove(key);
}
}
}
}
} catch (Exception e) {
if (completionListener != null) {
completionListener.onException(e);
}
}
}
if (completionListener != null) {
if (keys.isEmpty()) {
completionListener.onCompletion();
}
}
}
/** {@inheritDoc} */
@Override
public void put(K key, V value) {
map.put(key, value);
}
/** {@inheritDoc} */
@Override
public V getAndPut(K key, V value) {
V result = null;
synchronized (map) {
result = map.get(key);
if (value == null && isReadThrough && cacheLoader != null) {
V loadedValue = cacheLoader.load(key);
if (loadedValue != null) {
map.put(key, value);
value = loadedValue;
}
}
map.put(key, value);
}
return result;
}
/** {@inheritDoc} */
@Override
public void putAll(Map<? extends K, ? extends V> map) {
synchronized (map) {
for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
this.map.put(entry.getKey(), entry.getValue());
}
}
}
/** {@inheritDoc} */
@Override
public boolean putIfAbsent(K key, V value) {
synchronized (map) {
if (!map.containsKey(key)) {
map.put(key, value);
return true;
} else {
return false;
}
}
}
/** {@inheritDoc} */
@Override
public boolean remove(K key) {
boolean removed = false;
synchronized (map) {
removed = map.remove(key) != null;
notifyRemovedListeners(key);
}
return removed;
}
/** {@inheritDoc} */
@Override
public boolean remove(K key, V oldValue) {
synchronized (map) {
V value = map.get(key);
if (value != null && oldValue.equals(value)) {
map.remove(key);
notifyRemovedListeners(key);
return true;
}
}
return false;
}
/** {@inheritDoc} */
@Override
public V getAndRemove(K key) {
synchronized (map) {
V oldValue = null;
oldValue = map.get(key);
map.remove(key);
notifyRemovedListeners(key);
return oldValue;
}
}
/** {@inheritDoc} */
@Override
public boolean replace(K key, V oldValue, V newValue) {
synchronized (map) {
V value = map.get(key);
if (value != null && oldValue.equals(value)) {
map.put(key, newValue);
return true;
}
}
return false;
}
/** {@inheritDoc} */
@Override
public boolean replace(K key, V value) {
synchronized (map) {
if (map.containsKey(key)) {
map.put(key, value);
return true;
}
}
return false;
}
/** {@inheritDoc} */
@Override
public V getAndReplace(K key, V value) {
synchronized (map) {
V oldValue = map.get(key);
if (value != null && value.equals(oldValue)) {
map.put(key, value);
return oldValue;
}
}
return null;
}
/** {@inheritDoc} */
@Override
public void removeAll(Set<? extends K> keys) {
synchronized (map) {
for (K key : keys) {
if (map.containsKey(key)) {
map.remove(key);
} else {
keys.remove(key);
}
}
}
notifyRemovedListeners(keys);
}
/** {@inheritDoc} */
@Override
public void removeAll() {
synchronized (map) {
Set<K> keys = map.keySet();
map.clear();
notifyRemovedListeners(keys);
}
}
/** {@inheritDoc} */
@Override
public void clear() {
map.clear();
}
/** {@inheritDoc} */
@Override
public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
if (!MapConfiguration.class.isAssignableFrom(clazz)) {
throw new IllegalArgumentException();
}
return null;
}
/** {@inheritDoc} */
@Override
public <T> T invoke(K key, EntryProcessor<K, V, T> entryProcessor, Object... arguments)
throws EntryProcessorException {
return null;
}
/** {@inheritDoc} */
@Override
public <T> Map<K, EntryProcessorResult<T>> invokeAll(
Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... arguments) {
synchronized (map) {
for (K key : keys) {
V value = map.get(key);
if (value != null) {
entryProcessor.process(
new MutableEntry<K, V>() {
@Override
public boolean exists() {
return map.containsKey(key);
}
@Override
public void remove() {
synchronized (map) {
V value = map.get(key);
if (value != null) {
map.remove(key);
notifyRemovedListeners(key);
}
}
}
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return map.get(value);
}
@Override
public <T> T unwrap(Class<T> clazz) {
return null;
}
@Override
public void setValue(V value) {
map.put(key, value);
}
},
arguments);
}
}
}
return null;
}
/** {@inheritDoc} */
@Override
public String getName() {
return name;
}
/** {@inheritDoc} */
@Override
public CacheManager getCacheManager() {
return manager;
}
/** {@inheritDoc} */
@Override
public void close() {}
/** {@inheritDoc} */
@Override
public boolean isClosed() {
return false;
}
/** {@inheritDoc} */
@Override
public <T> T unwrap(Class<T> clazz) {
return (T) map;
}
/** {@inheritDoc} */
@Override
public void registerCacheEntryListener(
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
//cacheEntryRemovedListeners.add(cacheEntryListenerConfiguration.getCacheEntryListenerFactory().create());
}
/** {@inheritDoc} */
@Override
public void deregisterCacheEntryListener(
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {}
/** {@inheritDoc} */
@Override
public Iterator<Entry<K, V>> iterator() {
synchronized (map) {
return new Iterator<Entry<K, V>>() {
Iterator<Map.Entry<K, V>> entries = map.entrySet().iterator();
@Override
public boolean hasNext() {
return entries.hasNext();
}
@Override
public Entry<K, V> next() {
Map.Entry<K, V> entry = entries.next();
return new Entry<K, V>() {
K key = entry.getKey();
V value = entry.getValue();
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
@Override
public <T> T unwrap(Class<T> clazz) {
return null;
}
};
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}
private void notifyRemovedListeners(K key) {
// if (cacheEntryRemovedListeners != null) {
// cacheEntryRemovedListeners.forEach(listener -> listener.onRemoved())
// }
}
private void notifyRemovedListeners(Set<? extends K> keys) {}
}

View file

@ -1,60 +0,0 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A simple session-level cache abstraction keyed by K with values of type V.
 * Implementations decide eviction and retention policy.
 */
public interface SessionCache<K, V> {
static final Logger LOG = LoggerFactory.getLogger(SessionCache.class);

/**
 * Builds the default implementation: a Guava-backed cache capped at 25,000
 * entries, expiring entries 5 minutes after last access, holding values via
 * soft references, and logging the cause whenever an entry is evicted.
 */
static <K, V> SessionCache<K, V> defaultCache() {
GuavaCache<K, V> cache;
// Log evictions (size/expiry/GC pressure) for observability; explicit
// invalidations are not logged because wasEvicted() is false for them.
RemovalListener<K, V> listener =
new RemovalListener<K, V>() {
@Override
public void onRemoval(RemovalNotification<K, V> n) {
if (n.wasEvicted()) {
String cause = n.getCause().name();
LOG.info(cause);
}
}
};
cache =
new GuavaCache<K, V>(
CacheBuilder.newBuilder()
.maximumSize(25_000)
.expireAfterAccess(5, TimeUnit.MINUTES)
.softValues()
.removalListener(listener)
.build());
return cache;
}

/** Removes the entry for the given key, if present. */
void invalidate(K key);

/** Returns the cached value for the key, or null when absent. */
V get(K key);

/** Associates the given value with the key, replacing any prior mapping. */
void put(K key, V value);
}

View file

@ -25,16 +25,30 @@ import net.helenus.mapping.HelenusProperty;
public class UnboundFacet extends Facet<String> { public class UnboundFacet extends Facet<String> {
private final List<HelenusProperty> properties; private final List<HelenusProperty> properties;
private final boolean alone;
private final boolean combined;
public UnboundFacet(List<HelenusProperty> properties) { public UnboundFacet(List<HelenusProperty> properties, boolean alone, boolean combined) {
super(SchemaUtil.createPrimaryKeyPhrase(properties)); super(SchemaUtil.createPrimaryKeyPhrase(properties));
this.properties = properties; this.properties = properties;
this.alone = alone;
this.combined = combined;
} }
public UnboundFacet(HelenusProperty property) { public UnboundFacet(List<HelenusProperty> properties) {
this(properties, true, true);
}
public UnboundFacet(HelenusProperty property, boolean alone, boolean combined) {
super(property.getPropertyName()); super(property.getPropertyName());
properties = new ArrayList<HelenusProperty>(); properties = new ArrayList<HelenusProperty>();
properties.add(property); properties.add(property);
this.alone = alone;
this.combined = combined;
}
public UnboundFacet(HelenusProperty property) {
this(property, true, true);
} }
public List<HelenusProperty> getProperties() { public List<HelenusProperty> getProperties() {
@ -42,18 +56,22 @@ public class UnboundFacet extends Facet<String> {
} }
public Binder binder() { public Binder binder() {
return new Binder(name(), properties); return new Binder(name(), properties, alone, combined);
} }
public static class Binder { public static class Binder {
private final String name; private final String name;
private final boolean alone;
private final boolean combined;
private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>(); private final List<HelenusProperty> properties = new ArrayList<HelenusProperty>();
private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>(); private Map<HelenusProperty, Object> boundProperties = new HashMap<HelenusProperty, Object>();
Binder(String name, List<HelenusProperty> properties) { Binder(String name, List<HelenusProperty> properties, boolean alone, boolean combined) {
this.name = name; this.name = name;
this.properties.addAll(properties); this.properties.addAll(properties);
this.alone = alone;
this.combined = combined;
} }
public Binder setValueForProperty(HelenusProperty prop, Object value) { public Binder setValueForProperty(HelenusProperty prop, Object value) {
@ -67,7 +85,10 @@ public class UnboundFacet extends Facet<String> {
} }
public BoundFacet bind() { public BoundFacet bind() {
return new BoundFacet(name, boundProperties); BoundFacet facet = new BoundFacet(name, boundProperties);
facet.setUniquelyIdentifyingWhenAlone(alone);
facet.setUniquelyIdentifyingWhenCombined(combined);
return facet;
} }
} }
} }

View file

@ -19,6 +19,7 @@ import java.util.*;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>> public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
@ -108,6 +109,28 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
ifFilters.add(filter); ifFilters.add(filter);
} }
@Override
protected boolean isIdempotentOperation() {
if (filters == null) {
return super.isIdempotentOperation();
}
return filters
.stream()
.anyMatch(
filter -> {
HelenusPropertyNode node = filter.getNode();
if (node != null) {
HelenusProperty prop = node.getProperty();
if (prop != null) {
return prop.isIdempotent();
}
}
return false;
})
|| super.isIdempotentOperation();
}
protected List<Facet> bindFacetValues(List<Facet> facets) { protected List<Facet> bindFacetValues(List<Facet> facets) {
if (facets == null) { if (facets == null) {
return new ArrayList<Facet>(); return new ArrayList<Facet>();

View file

@ -42,7 +42,7 @@ public abstract class AbstractFilterStreamOperation<
public <V> O where(Getter<V> getter, Operator operator, V val) { public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); if (val != null) addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
@ -63,7 +63,7 @@ public abstract class AbstractFilterStreamOperation<
public <V> O and(Getter<V> getter, Operator operator, V val) { public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val)); if (val != null) addFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }
@ -84,7 +84,7 @@ public abstract class AbstractFilterStreamOperation<
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) { public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val)); if (val != null) addIfFilter(Filter.create(getter, operator, val));
return (O) this; return (O) this;
} }

View file

@ -41,13 +41,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
try { try {
ResultSet resultSet = ResultSet resultSet =
this.execute( this.execute(
sessionOps, sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
null,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
false);
return transform(resultSet); return transform(resultSet);
} finally { } finally {
context.stop(); context.stop();
@ -60,14 +54,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
try { try {
ResultSet resultSet = ResultSet resultSet =
execute( execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
E result = transform(resultSet); E result = transform(resultSet);
return result; return result;
} finally { } finally {
@ -88,7 +75,8 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
public CompletableFuture<E> async(UnitOfWork uow) { public CompletableFuture<E> async(UnitOfWork uow) {
if (uow == null) return async(); if (uow == null) return async();
return CompletableFuture.<E>supplyAsync( CompletableFuture<E> f =
CompletableFuture.<E>supplyAsync(
() -> { () -> {
try { try {
return sync(); return sync();
@ -96,5 +84,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}); });
uow.addFuture(f);
return f;
} }
} }

View file

@ -24,15 +24,20 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.io.Serializable;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.mapping.MappingUtil;
import net.helenus.support.Fun;
import org.apache.commons.lang3.SerializationUtils;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>> public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends AbstractStatementOperation<E, O> { extends AbstractStatementOperation<E, O> {
@ -64,10 +69,12 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
try { try {
Optional<E> result = Optional.empty(); Optional<E> result = Optional.empty();
E cacheResult = null; E cacheResult = null;
boolean updateCache = isSessionCacheable() && checkCache; boolean updateCache = isSessionCacheable() && !ignoreCache();
if (checkCache && isSessionCacheable()) { if (updateCache) {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
if (facets != null && facets.size() > 0) {
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets); cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) { if (cacheResult != null) {
@ -80,6 +87,10 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
cacheMiss.mark(); cacheMiss.mark();
} }
} }
} else {
//TODO(gburd): look in statement cache for results
}
}
if (!result.isPresent()) { if (!result.isPresent()) {
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
@ -87,20 +98,24 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
this.execute( this.execute(
sessionOps, sessionOps,
null, null,
traceContext,
queryExecutionTimeout, queryExecutionTimeout,
queryTimeoutUnits, queryTimeoutUnits,
showValues, showValues,
false); isSessionCacheable());
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
} }
if (updateCache && result.isPresent()) { if (updateCache && result.isPresent()) {
E r = result.get();
Class<?> resultClass = r.getClass();
if (!(resultClass.getEnclosingClass() != null
&& resultClass.getEnclosingClass() == Fun.class)) {
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();
if (facets != null && facets.size() > 1) { if (facets != null && facets.size() > 1) {
sessionOps.updateCache(result.get(), facets); sessionOps.updateCache(r, facets);
}
} }
} }
return result; return result;
@ -109,7 +124,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} }
} }
public Optional<E> sync(UnitOfWork<?> uow) throws TimeoutException { public Optional<E> sync(UnitOfWork uow) throws TimeoutException {
if (uow == null) return sync(); if (uow == null) return sync();
final Timer.Context context = requestLatency.time(); final Timer.Context context = requestLatency.time();
@ -119,11 +134,12 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
E cachedResult = null; E cachedResult = null;
final boolean updateCache; final boolean updateCache;
if (checkCache) { if (!ignoreCache()) {
Stopwatch timer = Stopwatch.createStarted(); Stopwatch timer = Stopwatch.createStarted();
try { try {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
if (facets != null) { if (facets != null && facets.size() > 0) {
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
cachedResult = checkCache(uow, facets); cachedResult = checkCache(uow, facets);
if (cachedResult != null) { if (cachedResult != null) {
updateCache = false; updateCache = false;
@ -132,23 +148,41 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
updateCache = true;
uowCacheMiss.mark(); uowCacheMiss.mark();
if (isSessionCacheable()) { if (isSessionCacheable()) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cachedResult = (E) sessionOps.checkCache(tableName, facets); cachedResult = (E) sessionOps.checkCache(tableName, facets);
if (cachedResult != null) { if (cachedResult != null) {
Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
if (Helenus.entity(iface).isDraftable()) {
result = Optional.of(cachedResult); result = Optional.of(cachedResult);
} else {
result =
Optional.of(
(E)
SerializationUtils.<Serializable>clone(
(Serializable) cachedResult));
}
updateCache = false;
sessionCacheHits.mark(); sessionCacheHits.mark();
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
updateCache = true;
sessionCacheMiss.mark(); sessionCacheMiss.mark();
cacheMiss.mark(); cacheMiss.mark();
uow.recordCacheAndDatabaseOperationCount(-1, 0); uow.recordCacheAndDatabaseOperationCount(-1, 0);
} }
} else {
updateCache = false;
} }
} }
} else {
//TODO(gburd): look in statement cache for results
updateCache = false; //true;
cacheMiss.mark();
uow.recordCacheAndDatabaseOperationCount(-1, 0);
}
} else { } else {
updateCache = false; updateCache = false;
} }
@ -171,14 +205,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
// Formulate the query and execute it against the Cassandra cluster. // Formulate the query and execute it against the Cassandra cluster.
ResultSet resultSet = ResultSet resultSet =
execute( execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
result = transform(resultSet); result = transform(resultSet);
@ -186,8 +213,11 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
// If we have a result, it wasn't from the UOW cache, and we're caching things // If we have a result, it wasn't from the UOW cache, and we're caching things
// then we need to put this result into the cache for future requests to find. // then we need to put this result into the cache for future requests to find.
if (updateCache && result.isPresent() && result.get() != deleted) { if (updateCache && result.isPresent()) {
cacheUpdate(uow, result.get(), getFacets()); E r = result.get();
if (!(r instanceof Fun) && r != deleted) {
cacheUpdate(uow, r, getFacets());
}
} }
return result; return result;
@ -207,9 +237,10 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
}); });
} }
public CompletableFuture<Optional<E>> async(UnitOfWork<?> uow) { public CompletableFuture<Optional<E>> async(UnitOfWork uow) {
if (uow == null) return async(); if (uow == null) return async();
return CompletableFuture.<Optional<E>>supplyAsync( CompletableFuture<Optional<E>> f =
CompletableFuture.<Optional<E>>supplyAsync(
() -> { () -> {
try { try {
return sync(); return sync();
@ -217,5 +248,7 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}); });
uow.addFuture(f);
return f;
} }
} }

View file

@ -15,8 +15,6 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import brave.Tracer;
import brave.propagation.TraceContext;
import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement; import com.datastax.driver.core.RegularStatement;
@ -43,19 +41,14 @@ import net.helenus.support.HelenusException;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>>
extends Operation<E> { extends Operation<E> {
private boolean ignoreCache = false;
protected boolean checkCache = true;
protected boolean showValues = true;
protected TraceContext traceContext;
long queryExecutionTimeout = 10;
TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
private ConsistencyLevel consistencyLevel; private ConsistencyLevel consistencyLevel;
private ConsistencyLevel serialConsistencyLevel; private ConsistencyLevel serialConsistencyLevel;
private RetryPolicy retryPolicy; private RetryPolicy retryPolicy;
private boolean idempotent = false;
private boolean enableTracing = false; private boolean enableTracing = false;
private long[] defaultTimestamp = null; private long[] defaultTimestamp = null;
private int[] fetchSize = null; private int[] fetchSize = null;
protected boolean idempotent = false;
public AbstractStatementOperation(AbstractSessionOperations sessionOperations) { public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
@ -66,12 +59,12 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
public abstract Statement buildStatement(boolean cached); public abstract Statement buildStatement(boolean cached);
public O uncached(boolean enabled) { public O uncached(boolean enabled) {
checkCache = enabled; ignoreCache = !enabled;
return (O) this; return (O) this;
} }
public O uncached() { public O uncached() {
checkCache = false; ignoreCache = true;
return (O) this; return (O) this;
} }
@ -252,22 +245,16 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
statement.setFetchSize(fetchSize[0]); statement.setFetchSize(fetchSize[0]);
} }
if (idempotent) { if (isIdempotentOperation()) {
statement.setIdempotent(true); statement.setIdempotent(true);
} }
return statement; return statement;
} }
public O zipkinContext(TraceContext traceContext) { @Override
if (traceContext != null) { protected boolean isIdempotentOperation() {
Tracer tracer = this.sessionOps.getZipkinTracer(); return idempotent;
if (tracer != null) {
this.traceContext = traceContext;
}
}
return (O) this;
} }
public Statement statement() { public Statement statement() {
@ -313,7 +300,11 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
throw new HelenusException("only RegularStatements can be prepared"); throw new HelenusException("only RegularStatements can be prepared");
} }
protected E checkCache(UnitOfWork<?> uow, List<Facet> facets) { protected boolean ignoreCache() {
return ignoreCache;
}
protected E checkCache(UnitOfWork uow, List<Facet> facets) {
E result = null; E result = null;
Optional<Object> optionalCachedResult = Optional.empty(); Optional<Object> optionalCachedResult = Optional.empty();
@ -327,7 +318,7 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
return result; return result;
} }
protected void cacheUpdate(UnitOfWork<?> uow, E pojo, List<Facet> identifyingFacets) { protected Object cacheUpdate(UnitOfWork uow, E pojo, List<Facet> identifyingFacets) {
List<Facet> facets = new ArrayList<>(); List<Facet> facets = new ArrayList<>();
Map<String, Object> valueMap = Map<String, Object> valueMap =
pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null; pojo instanceof MapExportable ? ((MapExportable) pojo).toMap() : null;
@ -359,6 +350,6 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
} }
// Cache the value (pojo), the statement key, and the fully bound facets. // Cache the value (pojo), the statement key, and the fully bound facets.
uow.cacheUpdate(pojo, facets); return uow.cacheUpdate(pojo, facets);
} }
} }

View file

@ -24,6 +24,7 @@ import com.google.common.base.Function;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
@ -31,9 +32,13 @@ import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.stream.Stream; import java.util.stream.Stream;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil; import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.mapping.MappingUtil;
import net.helenus.support.Fun;
import org.apache.commons.lang3.SerializationUtils;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>> public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
extends AbstractStatementOperation<E, O> { extends AbstractStatementOperation<E, O> {
@ -67,8 +72,10 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
E cacheResult = null; E cacheResult = null;
boolean updateCache = isSessionCacheable(); boolean updateCache = isSessionCacheable();
if (checkCache && isSessionCacheable()) { if (!ignoreCache() && isSessionCacheable()) {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
if (facets != null && facets.size() > 0) {
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cacheResult = (E) sessionOps.checkCache(tableName, facets); cacheResult = (E) sessionOps.checkCache(tableName, facets);
if (cacheResult != null) { if (cacheResult != null) {
@ -80,6 +87,10 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
sessionCacheMiss.mark(); sessionCacheMiss.mark();
cacheMiss.mark(); cacheMiss.mark();
} }
} else {
//TODO(gburd): look in statement cache for results
}
}
} }
if (resultStream == null) { if (resultStream == null) {
@ -88,11 +99,10 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
this.execute( this.execute(
sessionOps, sessionOps,
null, null,
traceContext,
queryExecutionTimeout, queryExecutionTimeout,
queryTimeoutUnits, queryTimeoutUnits,
showValues, showValues,
false); isSessionCacheable());
// Transform the query result set into the desired shape. // Transform the query result set into the desired shape.
resultStream = transform(resultSet); resultStream = transform(resultSet);
@ -104,7 +114,11 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
List<E> again = new ArrayList<>(); List<E> again = new ArrayList<>();
resultStream.forEach( resultStream.forEach(
result -> { result -> {
Class<?> resultClass = result.getClass();
if (!(resultClass.getEnclosingClass() != null
&& resultClass.getEnclosingClass() == Fun.class)) {
sessionOps.updateCache(result, facets); sessionOps.updateCache(result, facets);
}
again.add(result); again.add(result);
}); });
resultStream = again.stream(); resultStream = again.stream();
@ -126,11 +140,12 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
E cachedResult = null; E cachedResult = null;
final boolean updateCache; final boolean updateCache;
if (checkCache) { if (!ignoreCache()) {
Stopwatch timer = Stopwatch.createStarted(); Stopwatch timer = Stopwatch.createStarted();
try { try {
List<Facet> facets = bindFacetValues(); List<Facet> facets = bindFacetValues();
if (facets != null) { if (facets != null && facets.size() > 0) {
if (facets.stream().filter(f -> !f.fixed()).distinct().count() > 0) {
cachedResult = checkCache(uow, facets); cachedResult = checkCache(uow, facets);
if (cachedResult != null) { if (cachedResult != null) {
updateCache = false; updateCache = false;
@ -139,23 +154,40 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
updateCache = true;
uowCacheMiss.mark(); uowCacheMiss.mark();
if (isSessionCacheable()) { if (isSessionCacheable()) {
String tableName = CacheUtil.schemaName(facets); String tableName = CacheUtil.schemaName(facets);
cachedResult = (E) sessionOps.checkCache(tableName, facets); cachedResult = (E) sessionOps.checkCache(tableName, facets);
if (cachedResult != null) { if (cachedResult != null) {
resultStream = Stream.of(cachedResult); Class<?> iface = MappingUtil.getMappingInterface(cachedResult);
E result = null;
if (Helenus.entity(iface).isDraftable()) {
result = cachedResult;
} else {
result =
(E) SerializationUtils.<Serializable>clone((Serializable) cachedResult);
}
updateCache = false;
resultStream = Stream.of(result);
sessionCacheHits.mark(); sessionCacheHits.mark();
cacheHits.mark(); cacheHits.mark();
uow.recordCacheAndDatabaseOperationCount(1, 0); uow.recordCacheAndDatabaseOperationCount(1, 0);
} else { } else {
updateCache = true;
sessionCacheMiss.mark(); sessionCacheMiss.mark();
cacheMiss.mark(); cacheMiss.mark();
uow.recordCacheAndDatabaseOperationCount(-1, 0); uow.recordCacheAndDatabaseOperationCount(-1, 0);
} }
} else {
updateCache = false;
} }
} }
} else {
//TODO(gburd): look in statement cache for results
updateCache = false; //true;
cacheMiss.mark();
uow.recordCacheAndDatabaseOperationCount(-1, 0);
}
} else { } else {
updateCache = false; updateCache = false;
} }
@ -170,33 +202,29 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
// Check to see if we fetched the object from the cache // Check to see if we fetched the object from the cache
if (resultStream == null) { if (resultStream == null) {
ResultSet resultSet = ResultSet resultSet =
execute( execute(sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, true);
sessionOps,
uow,
traceContext,
queryExecutionTimeout,
queryTimeoutUnits,
showValues,
true);
resultStream = transform(resultSet); resultStream = transform(resultSet);
} }
// If we have a result and we're caching then we need to put it into the cache // If we have a result and we're caching then we need to put it into the cache
// for future requests to find. // for future requests to find.
if (resultStream != null) { if (resultStream != null) {
if (updateCache) {
List<E> again = new ArrayList<>(); List<E> again = new ArrayList<>();
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();
resultStream.forEach( resultStream.forEach(
result -> { result -> {
if (result != deleted) { Class<?> resultClass = result.getClass();
if (updateCache) { if (result != deleted
cacheUpdate(uow, result, facets); && !(resultClass.getEnclosingClass() != null
&& resultClass.getEnclosingClass() == Fun.class)) {
result = (E) cacheUpdate(uow, result, facets);
} }
again.add(result); again.add(result);
}
}); });
resultStream = again.stream(); resultStream = again.stream();
} }
}
return resultStream; return resultStream;
} finally { } finally {
@ -217,7 +245,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public CompletableFuture<Stream<E>> async(UnitOfWork uow) { public CompletableFuture<Stream<E>> async(UnitOfWork uow) {
if (uow == null) return async(); if (uow == null) return async();
return CompletableFuture.<Stream<E>>supplyAsync( CompletableFuture<Stream<E>> f =
CompletableFuture.<Stream<E>>supplyAsync(
() -> { () -> {
try { try {
return sync(); return sync();
@ -225,5 +254,7 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}); });
uow.addFuture(f);
return f;
} }
} }

View file

@ -0,0 +1,135 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.operation;
import com.codahale.metrics.Timer;
import com.datastax.driver.core.AtomicMonotonicTimestampGenerator;
import com.datastax.driver.core.BatchStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.TimestampGenerator;
import com.google.common.base.Stopwatch;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.UnitOfWork;
import net.helenus.support.HelenusException;
public class BatchOperation extends Operation<Long> {
//TODO(gburd): find the way to get the driver's timestamp generator
private static final TimestampGenerator timestampGenerator =
new AtomicMonotonicTimestampGenerator();
private final BatchStatement batch;
private List<AbstractOperation<?, ?>> operations = new ArrayList<AbstractOperation<?, ?>>();
private boolean logged = true;
public BatchOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
batch = new BatchStatement();
}
public void add(AbstractOperation<?, ?> operation) {
operations.add(operation);
}
@Override
public BatchStatement buildStatement(boolean cached) {
batch.addAll(
operations.stream().map(o -> o.buildStatement(cached)).collect(Collectors.toList()));
batch.setConsistencyLevel(sessionOps.getDefaultConsistencyLevel());
return batch;
}
public BatchOperation logged() {
logged = true;
return this;
}
public BatchOperation setLogged(boolean logStatements) {
logged = logStatements;
return this;
}
public Long sync() throws TimeoutException {
if (operations.size() == 0) return 0L;
final Timer.Context context = requestLatency.time();
try {
batch.setDefaultTimestamp(timestampGenerator.next());
ResultSet resultSet =
this.execute(
sessionOps, null, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
if (!resultSet.wasApplied()) {
throw new HelenusException("Failed to apply batch.");
}
} finally {
context.stop();
}
return batch.getDefaultTimestamp();
}
public Long sync(UnitOfWork uow) throws TimeoutException {
if (operations.size() == 0) return 0L;
if (uow == null) return sync();
final Timer.Context context = requestLatency.time();
final Stopwatch timer = Stopwatch.createStarted();
try {
uow.recordCacheAndDatabaseOperationCount(0, 1);
batch.setDefaultTimestamp(timestampGenerator.next());
ResultSet resultSet =
this.execute(
sessionOps, uow, queryExecutionTimeout, queryTimeoutUnits, showValues, false);
if (!resultSet.wasApplied()) {
throw new HelenusException("Failed to apply batch.");
}
} finally {
context.stop();
timer.stop();
}
uow.addDatabaseTime("Cassandra", timer);
return batch.getDefaultTimestamp();
}
public void addAll(BatchOperation batch) {
batch.operations.forEach(o -> this.operations.add(o));
}
public String toString() {
return toString(true); //TODO(gburd): sessionOps.showQueryValues()
}
public String toString(boolean showValues) {
StringBuilder s = new StringBuilder();
s.append("BEGIN ");
if (!logged) {
s.append("UNLOGGED ");
}
s.append("BATCH ");
if (batch.getDefaultTimestamp() > -9223372036854775808L) {
s.append("USING TIMESTAMP ").append(String.valueOf(batch.getDefaultTimestamp())).append(" ");
}
s.append(
operations
.stream()
.map(o -> Operation.queryString(o.buildStatement(showValues), showValues))
.collect(Collectors.joining(" ")));
s.append(" APPLY BATCH;");
return s.toString();
}
}

View file

@ -37,6 +37,7 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) { public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
super(sessionOperations); super(sessionOperations);
this.entity = entity; this.entity = entity;
//TODO(gburd): cache SELECT COUNT results within the scope of a UOW
} }
@Override @Override

View file

@ -28,6 +28,7 @@ import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> { public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {
@ -133,6 +134,10 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
return bindFacetValues(getFacets()); return bindFacetValues(getFacets());
} }
protected boolean isIdempotentOperation() {
return true;
}
@Override @Override
public ResultSet sync() throws TimeoutException { public ResultSet sync() throws TimeoutException {
ResultSet result = super.sync(); ResultSet result = super.sync();
@ -152,6 +157,16 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
return result; return result;
} }
public ResultSet batch(UnitOfWork uow) throws TimeoutException {
if (uow == null) {
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
}
uow.cacheEvict(bindFacetValues());
uow.batch(this);
return null;
}
@Override @Override
public List<Facet> getFacets() { public List<Facet> getFacets() {
return entity.getFacets(); return entity.getFacets();

View file

@ -22,15 +22,17 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.*; import java.util.*;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.DefaultPrimitiveTypes; import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil; import net.helenus.mapping.MappingUtil;
@ -45,26 +47,44 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>(); new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private final T pojo; private final T pojo;
private final Class<?> resultType; private final Class<?> resultType;
private final Set<String> readSet;
private HelenusEntity entity; private HelenusEntity entity;
private boolean ifNotExists; private boolean ifNotExists;
private int[] ttl; private int[] ttl;
private long[] timestamp; private long[] timestamp;
private long writeTime = 0L;
public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) { public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.ifNotExists = ifNotExists;
this.pojo = null; this.pojo = null;
this.readSet = null;
this.ifNotExists = ifNotExists;
this.resultType = ResultSet.class; this.resultType = ResultSet.class;
} }
public InsertOperation(
AbstractSessionOperations sessionOperations,
HelenusEntity entity,
Class<?> resultType,
boolean ifNotExists) {
super(sessionOperations);
this.pojo = null;
this.readSet = null;
this.ifNotExists = ifNotExists;
this.resultType = resultType;
this.entity = entity;
}
public InsertOperation( public InsertOperation(
AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) { AbstractSessionOperations sessionOperations, Class<?> resultType, boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.ifNotExists = ifNotExists;
this.pojo = null; this.pojo = null;
this.readSet = null;
this.ifNotExists = ifNotExists;
this.resultType = resultType; this.resultType = resultType;
} }
@ -73,11 +93,13 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
HelenusEntity entity, HelenusEntity entity,
T pojo, T pojo,
Set<String> mutations, Set<String> mutations,
Set<String> read,
boolean ifNotExists) { boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.entity = entity;
this.pojo = pojo; this.pojo = pojo;
this.readSet = read;
this.entity = entity;
this.ifNotExists = ifNotExists; this.ifNotExists = ifNotExists;
this.resultType = entity.getMappingInterface(); this.resultType = entity.getMappingInterface();
@ -136,8 +158,32 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
@Override @Override
public BuiltStatement buildStatement(boolean cached) { public BuiltStatement buildStatement(boolean cached) {
List<HelenusEntity> entities =
values.forEach(t -> addPropertyNode(t._1)); values
.stream()
.map(t -> t._1.getProperty().getEntity())
.distinct()
.collect(Collectors.toList());
if (entities.size() != 1) {
throw new HelenusMappingException(
"you can insert only single entity at a time, found: "
+ entities
.stream()
.map(e -> e.getMappingInterface().toString())
.collect(Collectors.joining(", ")));
}
HelenusEntity entity = entities.get(0);
if (this.entity != null) {
if (this.entity != entity) {
throw new HelenusMappingException(
"you can insert only single entity at a time, found: "
+ this.entity.getMappingInterface().toString()
+ ", "
+ entity.getMappingInterface().toString());
}
} else {
this.entity = entity;
}
if (values.isEmpty()) return null; if (values.isEmpty()) return null;
@ -156,6 +202,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
insert.value(t._1.getColumnName(), t._2); insert.value(t._1.getColumnName(), t._2);
}); });
//TODO(gburd): IF NOT EXISTS when @Constraints.Relationship is 1:1 or 1:m
if (this.ttl != null) { if (this.ttl != null) {
insert.using(QueryBuilder.ttl(this.ttl[0])); insert.using(QueryBuilder.ttl(this.ttl[0]));
} }
@ -166,16 +214,9 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return insert; return insert;
} }
@Override private T newInstance(Class<?> iface) {
public T transform(ResultSet resultSet) {
if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
throw new HelenusException("Statement was not applied due to consistency constraints");
}
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
if (values.size() > 0) { if (values.size() > 0) {
boolean immutable = iface.isAssignableFrom(Drafted.class); boolean immutable = entity.isDraftable();
Collection<HelenusProperty> properties = entity.getOrderedProperties(); Collection<HelenusProperty> properties = entity.getOrderedProperties();
Map<String, Object> backingMap = new HashMap<String, Object>(properties.size()); Map<String, Object> backingMap = new HashMap<String, Object>(properties.size());
@ -216,9 +257,24 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
// Lastly, create a new proxy object for the entity and return the new instance. // Lastly, create a new proxy object for the entity and return the new instance.
return (T) Helenus.map(iface, backingMap); return (T) Helenus.map(iface, backingMap);
} }
return null;
}
@Override
public T transform(ResultSet resultSet) {
if ((ifNotExists == true) && (resultSet.wasApplied() == false)) {
throw new HelenusException("Statement was not applied due to consistency constraints");
}
Class<?> iface = entity.getMappingInterface();
if (resultType == iface) {
T o = newInstance(iface);
if (o == null) {
// Oddly, this insert didn't change anything so simply return the pojo. // Oddly, this insert didn't change anything so simply return the pojo.
return (T) pojo; return (T) pojo;
} }
return o;
}
return (T) resultSet; return (T) resultSet;
} }
@ -234,23 +290,48 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
return this; return this;
} }
private void addPropertyNode(HelenusPropertyNode p) { protected void adjustTtlAndWriteTime(MapExportable pojo) {
if (entity == null) { if (ttl != null || writeTime != 0L) {
entity = p.getEntity(); List<String> columnNames =
} else if (entity != p.getEntity()) { values
throw new HelenusMappingException( .stream()
"you can insert only single entity " .map(t -> t._1.getProperty())
+ entity.getMappingInterface() .filter(
+ " or " prop -> {
+ p.getEntity().getMappingInterface()); switch (prop.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
return false;
default:
return true;
} }
})
.map(prop -> prop.getColumnName().toCql(false))
.collect(Collectors.toList());
if (columnNames.size() > 0) {
if (ttl != null) {
columnNames.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
}
if (writeTime != 0L) {
columnNames.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
}
}
}
}
@Override
protected boolean isIdempotentOperation() {
return values.stream().map(v -> v._1.getProperty()).allMatch(prop -> prop.isIdempotent())
|| super.isIdempotentOperation();
} }
@Override @Override
public T sync() throws TimeoutException { public T sync() throws TimeoutException {
T result = super.sync(); T result = super.sync();
if (entity.isCacheable() && result != null) { if (entity.isCacheable() && result != null) {
sessionOps.updateCache(result, entity.getFacets()); adjustTtlAndWriteTime((MapExportable) result);
sessionOps.updateCache(result, bindFacetValues());
} }
return result; return result;
} }
@ -272,15 +353,35 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
} }
Class<?> iface = entity.getMappingInterface(); Class<?> iface = entity.getMappingInterface();
if (resultType == iface) { if (resultType == iface) {
cacheUpdate(uow, result, entity.getFacets()); if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
} else { adjustTtlAndWriteTime((MapExportable) result);
if (entity.isCacheable()) {
sessionOps.cacheEvict(bindFacetValues());
} }
cacheUpdate(uow, result, bindFacetValues());
} }
return result; return result;
} }
public T batch(UnitOfWork uow) throws TimeoutException {
if (uow == null) {
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
}
if (this.entity != null) {
Class<?> iface = this.entity.getMappingInterface();
if (resultType == iface) {
final T result = (pojo == null) ? newInstance(iface) : pojo;
if (result != null) {
adjustTtlAndWriteTime((MapExportable) result);
cacheUpdate(uow, result, bindFacetValues());
}
uow.batch(this);
return (T) result;
}
}
return sync(uow);
}
@Override @Override
public List<Facet> bindFacetValues() { public List<Facet> bindFacetValues() {
List<Facet> facets = getFacets(); List<Facet> facets = getFacets();

View file

@ -15,9 +15,6 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import com.codahale.metrics.Meter; import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
@ -42,6 +39,9 @@ public abstract class Operation<E> {
private static final Logger LOG = LoggerFactory.getLogger(Operation.class); private static final Logger LOG = LoggerFactory.getLogger(Operation.class);
protected final AbstractSessionOperations sessionOps; protected final AbstractSessionOperations sessionOps;
protected boolean showValues;
protected long queryExecutionTimeout = 10;
protected TimeUnit queryTimeoutUnits = TimeUnit.SECONDS;
protected final Meter uowCacheHits; protected final Meter uowCacheHits;
protected final Meter uowCacheMiss; protected final Meter uowCacheMiss;
protected final Meter sessionCacheHits; protected final Meter sessionCacheHits;
@ -52,6 +52,7 @@ public abstract class Operation<E> {
Operation(AbstractSessionOperations sessionOperations) { Operation(AbstractSessionOperations sessionOperations) {
this.sessionOps = sessionOperations; this.sessionOps = sessionOperations;
this.showValues = sessionOps.showValues();
MetricRegistry metrics = sessionOperations.getMetricRegistry(); MetricRegistry metrics = sessionOperations.getMetricRegistry();
if (metrics == null) { if (metrics == null) {
metrics = new MetricRegistry(); metrics = new MetricRegistry();
@ -65,6 +66,10 @@ public abstract class Operation<E> {
this.requestLatency = metrics.timer("net.helenus.request-latency"); this.requestLatency = metrics.timer("net.helenus.request-latency");
} }
public static String queryString(BatchOperation operation, boolean includeValues) {
return operation.toString(includeValues);
}
public static String queryString(Statement statement, boolean includeValues) { public static String queryString(Statement statement, boolean includeValues) {
String query = null; String query = null;
if (statement instanceof BuiltStatement) { if (statement instanceof BuiltStatement) {
@ -87,32 +92,31 @@ public abstract class Operation<E> {
public ResultSet execute( public ResultSet execute(
AbstractSessionOperations session, AbstractSessionOperations session,
UnitOfWork uow, UnitOfWork uow,
TraceContext traceContext,
long timeout, long timeout,
TimeUnit units, TimeUnit units,
boolean showValues, boolean showValues,
boolean cached) boolean cached)
throws TimeoutException { throws TimeoutException {
// Start recording in a Zipkin sub-span our execution time to perform this
// operation.
Tracer tracer = session.getZipkinTracer();
Span span = null;
if (tracer != null && traceContext != null) {
span = tracer.newChild(traceContext);
}
try {
if (span != null) {
span.name("cassandra");
span.start();
}
Statement statement = options(buildStatement(cached)); Statement statement = options(buildStatement(cached));
if (session.isShowCql()) {
String stmt =
(this instanceof BatchOperation)
? queryString((BatchOperation) this, showValues)
: queryString(statement, showValues);
session.getPrintStream().println(stmt);
} else if (LOG.isDebugEnabled()) {
String stmt =
(this instanceof BatchOperation)
? queryString((BatchOperation) this, showValues)
: queryString(statement, showValues);
LOG.info("CQL> " + stmt);
}
Stopwatch timer = Stopwatch.createStarted(); Stopwatch timer = Stopwatch.createStarted();
try { try {
ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer, showValues); ResultSetFuture futureResultSet = session.executeAsync(statement, uow, timer);
if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1); if (uow != null) uow.recordCacheAndDatabaseOperationCount(0, 1);
ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units); ResultSet resultSet = futureResultSet.getUninterruptibly(timeout, units);
ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions(); ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
@ -126,11 +130,14 @@ public abstract class Operation<E> {
.map(InetAddress::toString) .map(InetAddress::toString)
.collect(Collectors.joining(", ")); .collect(Collectors.joining(", "));
ConsistencyLevel cl = ei.getAchievedConsistencyLevel(); ConsistencyLevel cl = ei.getAchievedConsistencyLevel();
if (cl == null) {
cl = statement.getConsistencyLevel();
}
int se = ei.getSpeculativeExecutions(); int se = ei.getSpeculativeExecutions();
String warn = ei.getWarnings().stream().collect(Collectors.joining(", ")); String warn = ei.getWarnings().stream().collect(Collectors.joining(", "));
String ri = String ri =
String.format( String.format(
"%s %s %s %s %s %s%sspec-retries: %d", "%s %s ~%s %s %s%s%sspec-retries: %d",
"server v" + qh.getCassandraVersion(), "server v" + qh.getCassandraVersion(),
qh.getAddress().toString(), qh.getAddress().toString(),
(oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "", (oh != null && !oh.equals("")) ? " [tried: " + oh + "]" : "",
@ -139,6 +146,7 @@ public abstract class Operation<E> {
(cl != null) (cl != null)
? (" consistency: " ? (" consistency: "
+ cl.name() + cl.name()
+ " "
+ (cl.isDCLocal() ? " DC " : "") + (cl.isDCLocal() ? " DC " : "")
+ (cl.isSerial() ? " SC " : "")) + (cl.isSerial() ? " SC " : ""))
: "", : "",
@ -158,13 +166,6 @@ public abstract class Operation<E> {
if (uow != null) uow.addDatabaseTime("Cassandra", timer); if (uow != null) uow.addDatabaseTime("Cassandra", timer);
log(statement, uow, timer, showValues); log(statement, uow, timer, showValues);
} }
} finally {
if (span != null) {
span.finish();
}
}
} }
void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) { void log(Statement statement, UnitOfWork uow, Stopwatch timer, boolean showValues) {
@ -178,10 +179,15 @@ public abstract class Operation<E> {
timerString = String.format(" %s ", timer.toString()); timerString = String.format(" %s ", timer.toString());
} }
LOG.info( LOG.info(
String.format("%s%s%s", uowString, timerString, Operation.queryString(statement, false))); String.format(
"%s%s%s", uowString, timerString, Operation.queryString(statement, showValues)));
} }
} }
protected boolean isIdempotentOperation() {
return false;
}
public Statement options(Statement statement) { public Statement options(Statement statement) {
return statement; return statement;
} }

View file

@ -63,4 +63,9 @@ public final class SelectFirstOperation<E>
public boolean isSessionCacheable() { public boolean isSessionCacheable() {
return delegate.isSessionCacheable(); return delegate.isSessionCacheable();
} }
@Override
public boolean ignoreCache() {
return delegate.ignoreCache();
}
} }

View file

@ -56,4 +56,9 @@ public final class SelectFirstTransformingOperation<R, E>
public boolean isSessionCacheable() { public boolean isSessionCacheable() {
return delegate.isSessionCacheable(); return delegate.isSessionCacheable();
} }
@Override
public boolean ignoreCache() {
return delegate.ignoreCache();
}
} }

View file

@ -23,14 +23,15 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select; import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection; import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where; import com.datastax.driver.core.querybuilder.Select.Where;
import com.google.common.collect.Iterables;
import java.util.*; import java.util.*;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Stream; import java.util.stream.Stream;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.cache.UnboundFacet; import net.helenus.core.cache.UnboundFacet;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
@ -52,8 +53,10 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
protected List<Ordering> ordering = null; protected List<Ordering> ordering = null;
protected Integer limit = null; protected Integer limit = null;
protected boolean allowFiltering = false; protected boolean allowFiltering = false;
protected String alternateTableName = null; protected String alternateTableName = null;
protected boolean isCacheable = false; protected boolean isCacheable = false;
protected boolean implementsEntityType = false;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public SelectOperation(AbstractSessionOperations sessionOperations) { public SelectOperation(AbstractSessionOperations sessionOperations) {
@ -89,7 +92,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
.map(p -> new HelenusPropertyNode(p, Optional.empty())) .map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p)); .forEach(p -> this.props.add(p));
isCacheable = entity.isCacheable(); this.isCacheable = entity.isCacheable();
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
} }
public SelectOperation( public SelectOperation(
@ -106,7 +110,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
.map(p -> new HelenusPropertyNode(p, Optional.empty())) .map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p)); .forEach(p -> this.props.add(p));
isCacheable = entity.isCacheable(); this.isCacheable = entity.isCacheable();
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
} }
public SelectOperation( public SelectOperation(
@ -118,6 +123,10 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
this.rowMapper = rowMapper; this.rowMapper = rowMapper;
Collections.addAll(this.props, props); Collections.addAll(this.props, props);
HelenusEntity entity = props[0].getEntity();
this.isCacheable = entity.isCacheable();
this.implementsEntityType = Entity.class.isAssignableFrom(entity.getMappingInterface());
} }
public CountOperation count() { public CountOperation count() {
@ -264,21 +273,17 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
+ prop.getEntity().getMappingInterface()); + prop.getEntity().getMappingInterface());
} }
// TODO(gburd): writeTime and ttl will be useful on merge() but cause object if (cached && implementsEntityType) {
// identity to fail.
if (false && cached) {
switch (prop.getProperty().getColumnType()) { switch (prop.getProperty().getColumnType()) {
case PARTITION_KEY: case PARTITION_KEY:
case CLUSTERING_COLUMN: case CLUSTERING_COLUMN:
break; break;
default: default:
if (entity.equals(prop.getEntity())) { if (entity.equals(prop.getEntity())) {
if (prop.getNext().isPresent()) {
columnName = Iterables.getLast(prop).getColumnName().toCql(true);
}
if (!prop.getProperty().getDataType().isCollectionType()) { if (!prop.getProperty().getDataType().isCollectionType()) {
selection.writeTime(columnName).as(columnName + "_writeTime"); columnName = prop.getProperty().getColumnName().toCql(false);
selection.ttl(columnName).as(columnName + "_ttl"); selection.ttl(columnName).as('"' + CacheUtil.ttlKey(columnName) + '"');
selection.writeTime(columnName).as('"' + CacheUtil.writeTimeKey(columnName) + '"');
} }
} }
break; break;
@ -308,16 +313,23 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
boolean isFirstIndex = true; boolean isFirstIndex = true;
for (Filter<?> filter : filters.values()) { for (Filter<?> filter : filters.values()) {
where.and(filter.getClause(sessionOps.getValuePreparer())); where.and(filter.getClause(sessionOps.getValuePreparer()));
HelenusProperty prop = filter.getNode().getProperty(); HelenusProperty filterProp = filter.getNode().getProperty();
if (allowFiltering == false) { HelenusProperty prop =
props
.stream()
.map(HelenusPropertyNode::getProperty)
.filter(thisProp -> thisProp.getPropertyName().equals(filterProp.getPropertyName()))
.findFirst()
.orElse(null);
if (allowFiltering == false && prop != null) {
switch (prop.getColumnType()) { switch (prop.getColumnType()) {
case PARTITION_KEY: case PARTITION_KEY:
case CLUSTERING_COLUMN:
break; break;
case CLUSTERING_COLUMN:
default: default:
// When using non-Cassandra-standard 2i types or when using more than one // When using non-Cassandra-standard 2i types or when using more than one
// indexed column or non-indexed columns the query must include ALLOW FILTERING. // indexed column or non-indexed columns the query must include ALLOW FILTERING.
if (prop.caseSensitiveIndex()) { if (prop.caseSensitiveIndex() == false) {
allowFiltering = true; allowFiltering = true;
} else if (prop.getIndexName() != null) { } else if (prop.getIndexName() != null) {
allowFiltering |= !isFirstIndex; allowFiltering |= !isFirstIndex;

View file

@ -56,4 +56,14 @@ public final class SelectTransformingOperation<R, E>
public Stream<R> transform(ResultSet resultSet) { public Stream<R> transform(ResultSet resultSet) {
return delegate.transform(resultSet).map(fn); return delegate.transform(resultSet).map(fn);
} }
@Override
public boolean isSessionCacheable() {
return delegate.isSessionCacheable();
}
@Override
public boolean ignoreCache() {
return delegate.ignoreCache();
}
} }

View file

@ -26,6 +26,7 @@ import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.cache.BoundFacet; import net.helenus.core.cache.BoundFacet;
import net.helenus.core.cache.CacheUtil;
import net.helenus.core.cache.Facet; import net.helenus.core.cache.Facet;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable; import net.helenus.core.reflect.MapExportable;
@ -33,7 +34,6 @@ import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil; import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider; import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
import net.helenus.support.Immutables; import net.helenus.support.Immutables;
@ -43,15 +43,18 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
private final Map<Assignment, BoundFacet> assignments = new HashMap<>(); private final Map<Assignment, BoundFacet> assignments = new HashMap<>();
private final AbstractEntityDraft<E> draft; private final AbstractEntityDraft<E> draft;
private final Map<String, Object> draftMap; private final Map<String, Object> draftMap;
private final Set<String> readSet;
private HelenusEntity entity = null; private HelenusEntity entity = null;
private Object pojo; private Object pojo;
private int[] ttl; private int[] ttl;
private long[] timestamp; private long[] timestamp;
private long writeTime = 0L;
public UpdateOperation(AbstractSessionOperations sessionOperations) { public UpdateOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
this.draft = null; this.draft = null;
this.draftMap = null; this.draftMap = null;
this.readSet = null;
} }
public UpdateOperation( public UpdateOperation(
@ -59,14 +62,25 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
super(sessionOperations); super(sessionOperations);
this.draft = draft; this.draft = draft;
this.draftMap = draft.toMap(); this.draftMap = draft.toMap();
this.readSet = draft.read();
} }
public UpdateOperation(AbstractSessionOperations sessionOperations, Object pojo) { public UpdateOperation(AbstractSessionOperations sessionOperations, Object pojo) {
super(sessionOperations); super(sessionOperations);
this.draft = null; this.draft = null;
this.draftMap = null; this.draftMap = null;
this.pojo = pojo;
if (pojo != null) {
this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo)); this.entity = Helenus.resolve(MappingUtil.getMappingInterface(pojo));
if (this.entity != null && entity.isCacheable() && pojo instanceof MapExportable) {
this.pojo = pojo;
this.readSet = ((MapExportable) pojo).toReadSet();
} else {
this.readSet = null;
}
} else {
this.readSet = null;
}
} }
public UpdateOperation( public UpdateOperation(
@ -74,6 +88,7 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
super(sessionOperations); super(sessionOperations);
this.draft = null; this.draft = null;
this.draftMap = null; this.draftMap = null;
this.readSet = null;
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty()); Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(p.getProperty(), v)); assignments.put(QueryBuilder.set(p.getColumnName(), value), new BoundFacet(p.getProperty(), v));
@ -97,15 +112,10 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
} }
} }
if (entity != null) { if (pojo != null) {
if (entity.isCacheable() && pojo != null && pojo instanceof MapExportable) { if (!BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop).equals(v)) {
String key = prop.getPropertyName(); String key = prop.getPropertyName();
Map<String, Object> map = ((MapExportable) pojo).toMap(); ((MapExportable) pojo).put(key, v);
if (!(map instanceof ValueProviderMap)) {
if (map.get(key) != v) {
map.put(key, v);
}
}
} }
} }
@ -133,13 +143,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
BoundFacet facet = null; BoundFacet facet = null;
if (pojo != null) {
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
if (pojo != null) {
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop); Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
facet = new BoundFacet(prop, value + delta); facet = new BoundFacet(prop, value + delta);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
draftMap.put(key, (Long) draftMap.get(key) + delta); draftMap.put(key, (Long) draftMap.get(key) + delta);
facet = new BoundFacet(prop, draftMap.get(key));
} }
assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet); assignments.put(QueryBuilder.incr(p.getColumnName(), delta), facet);
@ -160,13 +171,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
BoundFacet facet = null; BoundFacet facet = null;
if (pojo != null) {
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
if (pojo != null) {
Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop); Long value = (Long) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
facet = new BoundFacet(prop, value - delta); facet = new BoundFacet(prop, value - delta);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
draftMap.put(key, (Long) draftMap.get(key) - delta); draftMap.put(key, (Long) draftMap.get(key) - delta);
facet = new BoundFacet(prop, draftMap.get(key));
} }
assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet); assignments.put(QueryBuilder.decr(p.getColumnName(), delta), facet);
@ -191,18 +203,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value); Object valueObj = prepareSingleListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.add(0, value); list.add(0, value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.add(0, value); list.add(0, value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.prepend(p.getColumnName(), valueObj), facet);
@ -220,18 +235,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value); List valueObj = prepareListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.addAll(0, value); list.addAll(0, value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null && value.size() > 0) { } else if (draft != null && value.size() > 0) {
String key = p.getProperty().getPropertyName(); String key = p.getProperty().getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.addAll(0, value); list.addAll(0, value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.prependAll(p.getColumnName(), valueObj), facet);
@ -249,16 +267,14 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value); Object valueObj = prepareSingleListValue(p, value);
BoundFacet facet = null; final BoundFacet facet;
if (pojo != null || draft != null) {
List<V> list;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
if (pojo != null || draft != null) {
final List<V> list;
if (pojo != null) { if (pojo != null) {
list = list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
new ArrayList<V>(
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
} else { } else {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
} }
if (idx < 0) { if (idx < 0) {
@ -270,6 +286,8 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
} }
list.add(0, value); list.add(0, value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else {
facet = null;
} }
assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet); assignments.put(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj), facet);
@ -287,18 +305,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value); Object valueObj = prepareSingleListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.add(value); list.add(value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.add(value); list.add(value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.append(p.getColumnName(), valueObj), facet);
@ -315,18 +336,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value); List valueObj = prepareListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.addAll(value); list.addAll(value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null && value.size() > 0) { } else if (draft != null && value.size() > 0) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.addAll(value); list.addAll(value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.appendAll(p.getColumnName(), valueObj), facet);
@ -343,18 +367,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value); Object valueObj = prepareSingleListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.remove(value); list.remove(value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.remove(value); list.remove(value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.discard(p.getColumnName(), valueObj), facet);
@ -371,18 +398,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value); List valueObj = prepareListValue(p, value);
BoundFacet facet = null; final List<V> list;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
List<V> list = if (pojo != null) {
new ArrayList<V>( list = (List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
(List<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
list.removeAll(value); list.removeAll(value);
facet = new BoundFacet(prop, list); facet = new BoundFacet(prop, list);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
List<V> list = (List<V>) draftMap.get(key); list = (List<V>) draftMap.get(key);
list.removeAll(value); list.removeAll(value);
facet = new BoundFacet(prop, list);
} else {
list = null;
facet = null;
} }
assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.discardAll(p.getColumnName(), valueObj), facet);
@ -437,17 +467,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Object valueObj = prepareSingleSetValue(p, value); Object valueObj = prepareSingleSetValue(p, value);
BoundFacet facet = null; final Set<V> set;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Set<V> set = if (pojo != null) {
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop)); set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
set.add(value); set.add(value);
facet = new BoundFacet(prop, set); facet = new BoundFacet(prop, set);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
Set<V> set = (Set<V>) draftMap.get(key); set = (Set<V>) draftMap.get(key);
set.add(value); set.add(value);
facet = new BoundFacet(prop, set);
} else {
set = null;
facet = null;
} }
assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.add(p.getColumnName(), valueObj), facet);
@ -464,17 +498,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Set valueObj = prepareSetValue(p, value); Set valueObj = prepareSetValue(p, value);
BoundFacet facet = null; final Set<V> set;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Set<V> set = if (pojo != null) {
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop)); set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
set.addAll(value); set.addAll(value);
facet = new BoundFacet(prop, set); facet = new BoundFacet(prop, set);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
Set<V> set = (Set<V>) draftMap.get(key); set = (Set<V>) draftMap.get(key);
set.addAll(value); set.addAll(value);
facet = new BoundFacet(prop, set);
} else {
set = null;
facet = null;
} }
assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.addAll(p.getColumnName(), valueObj), facet);
@ -491,17 +529,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Object valueObj = prepareSingleSetValue(p, value); Object valueObj = prepareSingleSetValue(p, value);
BoundFacet facet = null; final Set<V> set;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Set<V> set = if (pojo != null) {
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop)); set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
set.remove(value); set.remove(value);
facet = new BoundFacet(prop, set); facet = new BoundFacet(prop, set);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
Set<V> set = (Set<V>) draftMap.get(key); set = (Set<V>) draftMap.get(key);
set.remove(value); set.remove(value);
facet = new BoundFacet(prop, set);
} else {
set = null;
facet = null;
} }
assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.remove(p.getColumnName(), valueObj), facet);
@ -518,17 +560,21 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Set valueObj = prepareSetValue(p, value); Set valueObj = prepareSetValue(p, value);
BoundFacet facet = null; final Set<V> set;
if (pojo != null) { final BoundFacet facet;
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Set<V> set = if (pojo != null) {
new HashSet<V>((Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop)); set = (Set<V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
set.removeAll(value); set.removeAll(value);
facet = new BoundFacet(prop, set); facet = new BoundFacet(prop, set);
} else if (draft != null) { } else if (draft != null) {
String key = p.getProperty().getPropertyName(); String key = prop.getPropertyName();
Set<V> set = (Set<V>) draftMap.get(key); set = (Set<V>) draftMap.get(key);
set.removeAll(value); set.removeAll(value);
facet = new BoundFacet(prop, set);
} else {
set = null;
facet = null;
} }
assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet); assignments.put(QueryBuilder.removeAll(p.getColumnName(), valueObj), facet);
@ -582,15 +628,19 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
BoundFacet facet = null; final Map<K, V> map;
final BoundFacet facet;
if (pojo != null) { if (pojo != null) {
Map<K, V> map = map = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
new HashMap<K, V>(
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
map.put(key, value); map.put(key, value);
facet = new BoundFacet(prop, map); facet = new BoundFacet(prop, map);
} else if (draft != null) { } else if (draft != null) {
((Map<K, V>) draftMap.get(prop.getPropertyName())).put(key, value); map = (Map<K, V>) draftMap.get(prop.getPropertyName());
map.put(key, value);
facet = new BoundFacet(prop, map);
} else {
map = null;
facet = null;
} }
Optional<Function<Object, Object>> converter = Optional<Function<Object, Object>> converter =
@ -618,15 +668,19 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
BoundFacet facet = null; final Map<K, V> newMap;
final BoundFacet facet;
if (pojo != null) { if (pojo != null) {
Map<K, V> newMap = newMap = (Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop, false);
new HashMap<K, V>(
(Map<K, V>) BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop));
newMap.putAll(map); newMap.putAll(map);
facet = new BoundFacet(prop, newMap); facet = new BoundFacet(prop, newMap);
} else if (draft != null) { } else if (draft != null) {
((Map<K, V>) draftMap.get(prop.getPropertyName())).putAll(map); newMap = (Map<K, V>) draftMap.get(prop.getPropertyName());
newMap.putAll(map);
facet = new BoundFacet(prop, newMap);
} else {
newMap = null;
facet = null;
} }
Optional<Function<Object, Object>> converter = Optional<Function<Object, Object>> converter =
@ -718,13 +772,58 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
} }
} }
private void adjustTtlAndWriteTime(MapExportable pojo) {
if (ttl != null || writeTime != 0L) {
List<String> names = new ArrayList<String>(assignments.size());
for (BoundFacet facet : assignments.values()) {
for (HelenusProperty prop : facet.getProperties()) {
names.add(prop.getColumnName().toCql(false));
}
}
if (names.size() > 0) {
if (ttl != null) {
names.forEach(name -> pojo.put(CacheUtil.ttlKey(name), ttl));
}
if (writeTime != 0L) {
names.forEach(name -> pojo.put(CacheUtil.writeTimeKey(name), writeTime));
}
}
}
}
@Override
protected boolean isIdempotentOperation() {
return assignments
.values()
.stream()
.allMatch(
facet -> {
if (facet != null) {
Set<HelenusProperty> props = facet.getProperties();
if (props != null && props.size() > 0) {
return props.stream().allMatch(prop -> prop.isIdempotent());
} else {
return true;
}
} else {
// In this case our UPDATE statement made mutations via the List, Set, Map methods only.
return false;
}
})
|| super.isIdempotentOperation();
}
@Override @Override
public E sync() throws TimeoutException { public E sync() throws TimeoutException {
E result = super.sync(); E result = super.sync();
if (entity.isCacheable()) { if (result != null && entity.isCacheable()) {
if (draft != null) { if (draft != null) {
sessionOps.updateCache(draft, bindFacetValues()); adjustTtlAndWriteTime(draft);
adjustTtlAndWriteTime((MapExportable) result);
sessionOps.updateCache(result, bindFacetValues());
} else if (pojo != null) { } else if (pojo != null) {
adjustTtlAndWriteTime((MapExportable) pojo);
sessionOps.updateCache(pojo, bindFacetValues()); sessionOps.updateCache(pojo, bindFacetValues());
} else { } else {
sessionOps.cacheEvict(bindFacetValues()); sessionOps.cacheEvict(bindFacetValues());
@ -739,15 +838,47 @@ public final class UpdateOperation<E> extends AbstractFilterOperation<E, UpdateO
return sync(); return sync();
} }
E result = super.sync(uow); E result = super.sync(uow);
if (result != null) {
if (draft != null) { if (draft != null) {
adjustTtlAndWriteTime(draft);
}
if (entity != null && MapExportable.class.isAssignableFrom(entity.getMappingInterface())) {
adjustTtlAndWriteTime((MapExportable) result);
cacheUpdate(uow, result, bindFacetValues()); cacheUpdate(uow, result, bindFacetValues());
} else if (pojo != null) { } else if (pojo != null) {
adjustTtlAndWriteTime((MapExportable) pojo);
cacheUpdate(uow, (E) pojo, bindFacetValues()); cacheUpdate(uow, (E) pojo, bindFacetValues());
return (E) pojo; return (E) pojo;
} }
}
return result; return result;
} }
public E batch(UnitOfWork uow) throws TimeoutException {
if (uow == null) {
throw new HelenusException("UnitOfWork cannot be null when batching operations.");
}
final E result;
if (draft != null) {
result = draft.build();
adjustTtlAndWriteTime(draft);
} else if (pojo != null) {
result = (E) pojo;
adjustTtlAndWriteTime((MapExportable) pojo);
} else {
result = null;
}
if (result != null) {
cacheUpdate(uow, result, bindFacetValues());
uow.batch(this);
return result;
}
return sync(uow);
}
@Override @Override
public List<Facet> bindFacetValues() { public List<Facet> bindFacetValues() {
List<Facet> facets = bindFacetValues(entity.getFacets()); List<Facet> facets = bindFacetValues(entity.getFacets());

View file

@ -22,4 +22,6 @@ public interface Drafted<T> extends MapExportable {
Set<String> mutated(); Set<String> mutated();
T build(); T build();
Set<String> read();
} }

View file

@ -0,0 +1,72 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core.reflect;
import net.helenus.core.Getter;
public interface Entity {
String WRITTEN_AT_METHOD = "writtenAt";
String TTL_OF_METHOD = "ttlOf";
String TOKEN_OF_METHOD = "tokenOf";
/**
* The write time for the property in question referenced by the getter.
*
* @param getter the property getter
* @return the timestamp associated with the property identified by the getter
*/
default Long writtenAt(Getter getter) {
return 0L;
}
/**
* The write time for the property in question referenced by the property name.
*
* @param prop the name of a property in this entity
* @return the timestamp associated with the property identified by the property name if it exists
*/
default Long writtenAt(String prop) {
return 0L;
};
/**
* The time-to-live for the property in question referenced by the getter.
*
* @param getter the property getter
* @return the time-to-live in seconds associated with the property identified by the getter
*/
default Integer ttlOf(Getter getter) {
return 0;
};
/**
* The time-to-live for the property in question referenced by the property name.
*
* @param prop the name of a property in this entity
* @return the time-to-live in seconds associated with the property identified by the property name if it exists
*/
default Integer ttlOf(String prop) {
return 0;
};
/**
* The token (partition identifier) for this entity which can change over time if
* the cluster grows or shrinks but should be stable otherwise.
*
* @return the token for the entity
*/
default Long tokenOf() { return 0L; }
}

View file

@ -63,6 +63,11 @@ public final class HelenusNamedProperty implements HelenusProperty {
return false; return false;
} }
@Override
public boolean isIdempotent() {
return false;
}
@Override @Override
public Class<?> getJavaType() { public Class<?> getJavaType() {
throw new HelenusMappingException("will never called"); throw new HelenusMappingException("will never called");

View file

@ -16,10 +16,25 @@
package net.helenus.core.reflect; package net.helenus.core.reflect;
import java.util.Map; import java.util.Map;
import java.util.Set;
import net.helenus.core.Getter;
public interface MapExportable { public interface MapExportable {
String TO_MAP_METHOD = "toMap";
public static final String TO_MAP_METHOD = "toMap"; String TO_READ_SET_METHOD = "toReadSet";
String PUT_METHOD = "put";
Map<String, Object> toMap(); Map<String, Object> toMap();
default Map<String, Object> toMap(boolean mutable) {
return null;
}
default Set<String> toReadSet() {
return null;
}
default void put(String key, Object value) {}
default <T> void put(Getter<T> getter, T value) {}
} }

View file

@ -15,6 +15,9 @@
*/ */
package net.helenus.core.reflect; package net.helenus.core.reflect;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.InvalidObjectException; import java.io.InvalidObjectException;
import java.io.ObjectInputStream; import java.io.ObjectInputStream;
import java.io.ObjectStreamException; import java.io.ObjectStreamException;
@ -24,10 +27,11 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.HashMap; import java.util.*;
import java.util.Map; import net.helenus.core.Getter;
import java.util.Set;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.cache.CacheUtil;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.Transient; import net.helenus.mapping.annotation.Transient;
import net.helenus.mapping.value.ValueProviderMap; import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
@ -35,7 +39,8 @@ import net.helenus.support.HelenusException;
public class MapperInvocationHandler<E> implements InvocationHandler, Serializable { public class MapperInvocationHandler<E> implements InvocationHandler, Serializable {
private static final long serialVersionUID = -7044209982830584984L; private static final long serialVersionUID = -7044209982830584984L;
private final Map<String, Object> src; private Map<String, Object> src;
private final Set<String> read = new HashSet<String>();
private final Class<E> iface; private final Class<E> iface;
public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) { public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
@ -95,15 +100,96 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return true; return true;
} }
} }
if (otherObj instanceof MapExportable && src.equals(((MapExportable) otherObj).toMap())) { if (otherObj instanceof MapExportable) {
return true; return MappingUtil.compareMaps((MapExportable) otherObj, src);
}
if (src instanceof MapExportable && otherObj.equals(((MapExportable) src).toMap())) {
return true;
} }
return false; return false;
} }
if (MapExportable.PUT_METHOD.equals(methodName) && method.getParameterCount() == 2) {
final String key;
if (args[0] instanceof String) {
key = (String) args[0];
} else if (args[0] instanceof Getter) {
key = MappingUtil.resolveMappingProperty((Getter) args[0]).getProperty().getPropertyName();
} else {
key = null;
}
if (key != null) {
final Object value = (Object) args[1];
if (src instanceof ValueProviderMap) {
this.src = fromValueProviderMap(src);
}
src.put(key, value);
}
return null;
}
if (Entity.WRITTEN_AT_METHOD.equals(methodName) && method.getParameterCount() == 1) {
final String key;
if (args[0] instanceof String) {
key = CacheUtil.writeTimeKey((String) args[0]);
} else if (args[0] instanceof Getter) {
Getter getter = (Getter) args[0];
key =
CacheUtil.writeTimeKey(
MappingUtil.resolveMappingProperty(getter)
.getProperty()
.getColumnName()
.toCql(false));
} else {
return 0L;
}
Long v = (Long) src.get(key);
if (v != null) {
return v;
}
return 0L;
}
if (Entity.TOKEN_OF_METHOD.equals(methodName) && method.getParameterCount() == 0) {
Long v = (Long) src.get("");
if (v != null) {
return v;
}
return 0L;
}
if (Entity.TTL_OF_METHOD.equals(methodName) && method.getParameterCount() == 1) {
final String key;
if (args[0] instanceof String) {
key = CacheUtil.ttlKey((String) args[0]);
} else if (args[0] instanceof Getter) {
Getter getter = (Getter) args[0];
key =
CacheUtil.ttlKey(
MappingUtil.resolveMappingProperty(getter)
.getProperty()
.getColumnName()
.toCql(false));
} else {
return 0;
}
int v[] = (int[]) src.get(key);
if (v != null) {
return v[0];
}
return 0;
}
if (MapExportable.TO_MAP_METHOD.equals(methodName)) {
if (method.getParameterCount() == 1 && args[0] instanceof Boolean) {
if ((boolean) args[0] == true) {
return fromValueProviderMap(src, true);
}
}
return Collections.unmodifiableMap(src);
}
if (MapExportable.TO_READ_SET_METHOD.equals(methodName)) {
return read;
}
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) { if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
throw new HelenusException("invalid getter method " + method); throw new HelenusException("invalid getter method " + method);
} }
@ -128,26 +214,21 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return Helenus.dsl(iface); return Helenus.dsl(iface);
} }
if (MapExportable.TO_MAP_METHOD.equals(methodName)) { final Object value = src.get(methodName);
return src; // Collections.unmodifiableMap(src); read.add(methodName);
}
Object value = src.get(methodName);
Class<?> returnType = method.getReturnType();
if (value == null) { if (value == null) {
Class<?> returnType = method.getReturnType();
// Default implementations of non-Transient methods in entities are the default // Default implementations of non-Transient methods in entities are the default
// value when the // value when the map contains 'null'.
// map contains 'null'.
if (method.isDefault()) { if (method.isDefault()) {
return invokeDefault(proxy, method, args); return invokeDefault(proxy, method, args);
} }
// Otherwise, if the return type of the method is a primitive Java type then // Otherwise, if the return type of the method is a primitive Java type then
// we'll return the standard // we'll return the standard default values to avoid a NPE in user code.
// default values to avoid a NPE in user code.
if (returnType.isPrimitive()) { if (returnType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType); DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
if (type == null) { if (type == null) {
@ -160,6 +241,35 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
return value; return value;
} }
static Map<String, Object> fromValueProviderMap(Map v) {
return fromValueProviderMap(v, false);
}
static Map<String, Object> fromValueProviderMap(Map v, boolean mutable) {
if (v instanceof ValueProviderMap) {
Map<String, Object> m = new HashMap<String, Object>(v.size());
Set<String> keys = v.keySet();
for (String key : keys) {
Object value = v.get(key);
if (value != null && mutable) {
if (ImmutableList.class.isAssignableFrom(value.getClass())) {
m.put(key, new ArrayList((List) value));
} else if (ImmutableMap.class.isAssignableFrom(value.getClass())) {
m.put(key, new HashMap((Map) value));
} else if (ImmutableSet.class.isAssignableFrom(value.getClass())) {
m.put(key, new HashSet((Set) value));
} else {
m.put(key, value);
}
} else {
m.put(key, value);
}
}
return m;
}
return v;
}
static class SerializationProxy<E> implements Serializable { static class SerializationProxy<E> implements Serializable {
private static final long serialVersionUID = -5617583940055969353L; private static final long serialVersionUID = -5617583940055969353L;
@ -170,11 +280,7 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
public SerializationProxy(MapperInvocationHandler mapper) { public SerializationProxy(MapperInvocationHandler mapper) {
this.iface = mapper.iface; this.iface = mapper.iface;
if (mapper.src instanceof ValueProviderMap) { if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size()); this.src = fromValueProviderMap(mapper.src);
Set<String> keys = mapper.src.keySet();
for (String key : keys) {
this.src.put(key, mapper.src.get(key));
}
} else { } else {
this.src = mapper.src; this.src = mapper.src;
} }

View file

@ -34,4 +34,6 @@ public interface HelenusEntity {
HelenusProperty getProperty(String name); HelenusProperty getProperty(String name);
List<Facet> getFacets(); List<Facet> getFacets();
boolean isDraftable();
} }

View file

@ -31,6 +31,7 @@ import net.helenus.mapping.annotation.*;
import net.helenus.mapping.validator.DistinctValidator; import net.helenus.mapping.validator.DistinctValidator;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
import org.apache.commons.lang3.ClassUtils; import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;
public final class HelenusMappingEntity implements HelenusEntity { public final class HelenusMappingEntity implements HelenusEntity {
@ -38,6 +39,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
private final HelenusEntityType type; private final HelenusEntityType type;
private final IdentityName name; private final IdentityName name;
private final boolean cacheable; private final boolean cacheable;
private final boolean draftable;
private final ImmutableMap<String, Method> methods; private final ImmutableMap<String, Method> methods;
private final ImmutableMap<String, HelenusProperty> props; private final ImmutableMap<String, HelenusProperty> props;
private final ImmutableList<HelenusProperty> orderedProps; private final ImmutableList<HelenusProperty> orderedProps;
@ -111,9 +113,25 @@ public final class HelenusMappingEntity implements HelenusEntity {
// Caching // Caching
cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class)); cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));
// Draft
Class<?> draft;
try {
draft = Class.forName(iface.getName() + "$Draft");
} catch (Exception ignored) {
draft = null;
}
draftable = (draft != null);
// Materialized view
List<HelenusProperty> primaryKeyProperties = new ArrayList<>(); List<HelenusProperty> primaryKeyProperties = new ArrayList<>();
ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder(); ImmutableList.Builder<Facet> facetsBuilder = ImmutableList.builder();
if (iface.getDeclaredAnnotation(MaterializedView.class) == null) {
facetsBuilder.add(new Facet("table", name.toCql()).setFixed()); facetsBuilder.add(new Facet("table", name.toCql()).setFixed());
} else {
facetsBuilder.add(
new Facet("table", Helenus.entity(iface.getInterfaces()[0]).getName().toCql())
.setFixed());
}
for (HelenusProperty prop : orderedProps) { for (HelenusProperty prop : orderedProps) {
switch (prop.getColumnType()) { switch (prop.getColumnType()) {
case PARTITION_KEY: case PARTITION_KEY:
@ -127,8 +145,25 @@ public final class HelenusMappingEntity implements HelenusEntity {
} }
for (ConstraintValidator<?, ?> constraint : for (ConstraintValidator<?, ?> constraint :
MappingUtil.getValidators(prop.getGetterMethod())) { MappingUtil.getValidators(prop.getGetterMethod())) {
if (constraint.getClass().isAssignableFrom(DistinctValidator.class)) { if (constraint instanceof DistinctValidator) {
UnboundFacet facet = new UnboundFacet(prop); DistinctValidator validator = (DistinctValidator) constraint;
String[] values = validator.constraintAnnotation.value();
UnboundFacet facet;
if (values != null && values.length >= 1 && !(StringUtils.isBlank(values[0]))) {
List<HelenusProperty> props = new ArrayList<HelenusProperty>(values.length + 1);
props.add(prop);
for (String value : values) {
for (HelenusProperty p : orderedProps) {
String name = p.getPropertyName();
if (name.equals(value) && !name.equals(prop.getPropertyName())) {
props.add(p);
}
}
}
facet = new UnboundFacet(props, validator.alone(), validator.combined());
} else {
facet = new UnboundFacet(prop, validator.alone(), validator.combined());
}
facetsBuilder.add(facet); facetsBuilder.add(facet);
break; break;
} }
@ -188,6 +223,11 @@ public final class HelenusMappingEntity implements HelenusEntity {
return cacheable; return cacheable;
} }
@Override
public boolean isDraftable() {
return draftable;
}
@Override @Override
public Class<?> getMappingInterface() { public Class<?> getMappingInterface() {
return iface; return iface;

View file

@ -35,6 +35,7 @@ public final class HelenusMappingProperty implements HelenusProperty {
private final String propertyName; private final String propertyName;
private final Optional<IdentityName> indexName; private final Optional<IdentityName> indexName;
private final boolean caseSensitiveIndex; private final boolean caseSensitiveIndex;
private final boolean idempotent;
private final ColumnInformation columnInfo; private final ColumnInformation columnInfo;
@ -56,6 +57,15 @@ public final class HelenusMappingProperty implements HelenusProperty {
this.columnInfo = new ColumnInformation(getter); this.columnInfo = new ColumnInformation(getter);
switch (this.columnInfo.getColumnType()) {
case PARTITION_KEY:
case CLUSTERING_COLUMN:
this.idempotent = true;
break;
default:
this.idempotent = MappingUtil.idempotent(getter);
}
this.genericJavaType = getter.getGenericReturnType(); this.genericJavaType = getter.getGenericReturnType();
this.javaType = getter.getReturnType(); this.javaType = getter.getReturnType();
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType); this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
@ -112,6 +122,11 @@ public final class HelenusMappingProperty implements HelenusProperty {
return caseSensitiveIndex; return caseSensitiveIndex;
} }
@Override
public boolean isIdempotent() {
return idempotent;
}
@Override @Override
public String getPropertyName() { public String getPropertyName() {
return propertyName; return propertyName;

View file

@ -37,6 +37,8 @@ public interface HelenusProperty {
boolean caseSensitiveIndex(); boolean caseSensitiveIndex();
boolean isIdempotent();
Class<?> getJavaType(); Class<?> getJavaType();
AbstractDataType getDataType(); AbstractDataType getDataType();

View file

@ -16,11 +16,12 @@
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.annotation.Annotation; import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.stream.Collectors;
import javax.validation.Constraint; import javax.validation.Constraint;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.Getter; import net.helenus.core.Getter;
@ -122,10 +123,29 @@ public final class MappingUtil {
return false; return false;
} }
public static boolean idempotent(Method getterMethod) {
Column column = getterMethod.getDeclaredAnnotation(Column.class);
if (column != null) {
return column.idempotent();
}
return false;
}
public static String getPropertyName(Method getter) { public static String getPropertyName(Method getter) {
return getter.getName(); return getter.getName();
} }
public static HelenusProperty getPropertyForColumn(HelenusEntity entity, String name) {
if (name == null) return null;
return entity
.getOrderedProperties()
.stream()
.filter(p -> p.getColumnName().equals(name))
.findFirst()
.orElse(null);
}
public static String getDefaultColumnName(Method getter) { public static String getDefaultColumnName(Method getter) {
return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter)); return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
} }
@ -284,28 +304,6 @@ public final class MappingUtil {
} }
} }
// https://stackoverflow.com/a/4882306/366692
public static <T> T clone(T object) throws CloneNotSupportedException {
Object clone = null;
// Use reflection, because there is no other way
try {
Method method = object.getClass().getMethod("clone");
clone = method.invoke(object);
} catch (InvocationTargetException e) {
rethrow(e.getCause());
} catch (Exception cause) {
rethrow(cause);
}
if (object.getClass().isInstance(clone)) {
@SuppressWarnings("unchecked") // clone class <= object class <= T
T t = (T) clone;
return t;
} else {
throw new ClassCastException(clone.getClass().getName());
}
}
private static void rethrow(Throwable cause) throws CloneNotSupportedException { private static void rethrow(Throwable cause) throws CloneNotSupportedException {
if (cause instanceof RuntimeException) { if (cause instanceof RuntimeException) {
throw (RuntimeException) cause; throw (RuntimeException) cause;
@ -320,4 +318,32 @@ public final class MappingUtil {
e.initCause(cause); e.initCause(cause);
throw e; throw e;
} }
public static boolean compareMaps(MapExportable me, Map<String, Object> m2) {
Map<String, Object> m1 = me.toMap();
List<String> matching =
m2.entrySet()
.stream()
.filter(e -> !e.getKey().matches("^_.*_(ttl|writeTime)$"))
.filter(
e -> {
String k = e.getKey();
if (m1.containsKey(k)) {
Object o1 = e.getValue();
Object o2 = m1.get(k);
if (o1 == o2 || o1.equals(o2)) return true;
}
return false;
})
.map(e -> e.getKey())
.collect(Collectors.toList());
List<String> divergent =
m1.entrySet()
.stream()
.filter(e -> !e.getKey().matches("^_.*_(ttl|writeTime)$"))
.filter(e -> !matching.contains(e.getKey()))
.map(e -> e.getKey())
.collect(Collectors.toList());
return divergent.size() > 0 ? false : true;
}
} }

View file

@ -59,4 +59,13 @@ public @interface Column {
* @return true if name have to be quoted * @return true if name have to be quoted
*/ */
boolean forceQuote() default false; boolean forceQuote() default false;
/**
* Used to determine if mutations (insert, upsert, update) can be retried by the server. When all
* fields in a query are idempotent the query is marked idempotent. Optionally, a user can
* explicitly mark a query idempotent even if all fields are not marked as such.
*
* @return
*/
boolean idempotent() default false;
} }

View file

@ -228,10 +228,107 @@ public final class Constraints {
public @interface Distinct { public @interface Distinct {
/** /**
* User defined Enum to further restrict the items in the set. * User defined list of properties that combine with this one to result in a distinct
* combination in the table.
* *
* @return Java * @return Java
*/ */
Class<? extends Enum> value() default Enum.class; String[] value() default "";
boolean alone() default true;
boolean combined() default true;
}
/**
* Distinct annotation is used to signal, but not ensure that a value should be distinct in the
* database.
*
* <p>Can be used only for @java.lang.CharSequence
*
* <p>It does not have effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = OneToOneRelationshipValidator.class)
public @interface OneToOne {
/**
* User defined list of properties that combine with this one to result in a distinct
* combination in the table.
*
* @return Java
*/
String[] value() default "";
}
/**
* Distinct annotation is used to signal, but not ensure that a value should be distinct in the
* database.
*
* <p>Can be used only for @java.lang.CharSequence
*
* <p>It does not have effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = OneToManyRelationshipValidator.class)
public @interface OneToMany {
/**
* User defined list of properties that combine with this one to result in a distinct
* combination in the table.
*
* @return Java
*/
String[] value() default "";
}
/**
* Distinct annotation is used to signal, but not ensure that a value should be distinct in the
* database.
*
* <p>Can be used only for @java.lang.CharSequence
*
* <p>It does not have effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = ManyToOneRelationshipValidator.class)
public @interface ManyToOne {
/**
* User defined list of properties that combine with this one to result in a distinct
* combination in the table.
*
* @return Java
*/
String[] value() default "";
}
/**
* Distinct annotation is used to signal, but not ensure that a value should be distinct in the
* database.
*
* <p>Can be used only for @java.lang.CharSequence
*
* <p>It does not have effect on selects and data retrieval operations
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = ManyToManyRelationshipValidator.class)
public @interface ManyToMany {
/**
* User defined list of properties that combine with this one to result in a distinct
* combination in the table.
*
* @return Java
*/
String[] value() default "";
} }
} }

View file

@ -13,31 +13,22 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package net.helenus.mapping.validator;
package net.helenus.core.cache; import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import com.google.common.cache.Cache; public abstract class AbstractConstraintValidator<A extends Annotation, T>
implements ConstraintValidator<A, T> {
public class GuavaCache<K, V> implements SessionCache<K, V> { public A constraintAnnotation;
final Cache<K, V> cache; @Override
public void initialize(A constraintAnnotation) {
GuavaCache(Cache<K, V> cache) { this.constraintAnnotation = constraintAnnotation;
this.cache = cache;
} }
@Override @Override
public void invalidate(K key) { public abstract boolean isValid(T value, ConstraintValidatorContext context);
cache.invalidate(key);
}
@Override
public V get(K key) {
return cache.getIfPresent(key);
}
@Override
public void put(K key, V value) {
cache.put(key, value);
}
} }

View file

@ -20,15 +20,32 @@ import javax.validation.ConstraintValidatorContext;
import net.helenus.mapping.annotation.Constraints; import net.helenus.mapping.annotation.Constraints;
public final class DistinctValidator public final class DistinctValidator
extends AbstractConstraintValidator<Constraints.Distinct, CharSequence>
implements ConstraintValidator<Constraints.Distinct, CharSequence> { implements ConstraintValidator<Constraints.Distinct, CharSequence> {
private Constraints.Distinct annotation;
@Override @Override
public void initialize(Constraints.Distinct constraintAnnotation) {} public void initialize(Constraints.Distinct constraintAnnotation) {
super.initialize(constraintAnnotation);
this.annotation = constraintAnnotation;
}
@Override @Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) { public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): if there is an Enum type supplied, check that value is valid // TODO(gburd): check that the list contains valid property names.
// Enum.name()
return true; return true;
} }
public String[] value() {
return annotation == null ? null : annotation.value();
}
public boolean alone() {
return annotation == null ? true : annotation.alone();
}
public boolean combined() {
return annotation == null ? true : annotation.combined();
}
} }

View file

@ -0,0 +1,33 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.validator;
import javax.validation.ConstraintValidatorContext;
import net.helenus.mapping.annotation.Constraints;
public class ManyToManyRelationshipValidator extends RelationshipValidator<Constraints.ManyToMany> {
@Override
public void initialize(Constraints.ManyToMany constraintAnnotation) {
super.initialize(constraintAnnotation);
}
@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): check that the list contains valid property names.
return true;
}
}

View file

@ -0,0 +1,33 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.validator;
import javax.validation.ConstraintValidatorContext;
import net.helenus.mapping.annotation.Constraints;
public class ManyToOneRelationshipValidator extends RelationshipValidator<Constraints.ManyToOne> {
@Override
public void initialize(Constraints.ManyToOne constraintAnnotation) {
super.initialize(constraintAnnotation);
}
@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): check that the list contains valid property names.
return true;
}
}

View file

@ -0,0 +1,33 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.validator;
import javax.validation.ConstraintValidatorContext;
import net.helenus.mapping.annotation.Constraints;
public class OneToManyRelationshipValidator extends RelationshipValidator<Constraints.OneToMany> {
@Override
public void initialize(Constraints.OneToMany constraintAnnotation) {
super.initialize(constraintAnnotation);
}
@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): check that the list contains valid property names.
return true;
}
}

View file

@ -13,14 +13,21 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package net.helenus.core; package net.helenus.mapping.validator;
import net.helenus.support.HelenusException; import javax.validation.ConstraintValidatorContext;
import net.helenus.mapping.annotation.Constraints;
class UnitOfWorkImpl extends AbstractUnitOfWork<HelenusException> { public class OneToOneRelationshipValidator extends RelationshipValidator<Constraints.OneToOne> {
@SuppressWarnings("unchecked") @Override
public UnitOfWorkImpl(HelenusSession session, UnitOfWork parent) { public void initialize(Constraints.OneToOne constraintAnnotation) {
super(session, (AbstractUnitOfWork<HelenusException>) parent); super.initialize(constraintAnnotation);
}
@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
// TODO(gburd): check that the list contains valid property names.
return true;
} }
} }

View file

@ -0,0 +1,35 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.validator;
import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
public abstract class RelationshipValidator<A extends Annotation>
extends AbstractConstraintValidator<A, CharSequence>
implements ConstraintValidator<A, CharSequence> {
@Override
public void initialize(A constraintAnnotation) {
super.initialize(constraintAnnotation);
}
@Override
public boolean isValid(CharSequence value, ConstraintValidatorContext context) {
return false;
}
}

View file

@ -32,6 +32,7 @@ public enum BeanColumnValueProvider implements ColumnValueProvider {
Object value = null; Object value = null;
try { try {
getter.setAccessible(true);
value = getter.invoke(bean, new Object[] {}); value = getter.invoke(bean, new Object[] {});
} catch (InvocationTargetException e) { } catch (InvocationTargetException e) {
if (e.getCause() != null) { if (e.getCause() != null) {

View file

@ -15,11 +15,11 @@
*/ */
package net.helenus.mapping.value; package net.helenus.mapping.value;
import com.google.common.collect.ImmutableMap;
import java.util.Collection; import java.util.Collection;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.core.reflect.Drafted;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
@ -35,7 +35,7 @@ public final class ValueProviderMap implements Map<String, Object> {
this.source = source; this.source = source;
this.valueProvider = valueProvider; this.valueProvider = valueProvider;
this.entity = entity; this.entity = entity;
this.immutable = entity.getMappingInterface().isAssignableFrom(Drafted.class); this.immutable = entity.isDraftable();
} }
private static void throwShouldNeverCall(String methodName) { private static void throwShouldNeverCall(String methodName) {
@ -45,8 +45,7 @@ public final class ValueProviderMap implements Map<String, Object> {
methodName)); methodName));
} }
@Override public Object get(Object key, boolean immutable) {
public Object get(Object key) {
if (key instanceof String) { if (key instanceof String) {
String name = (String) key; String name = (String) key;
HelenusProperty prop = entity.getProperty(name); HelenusProperty prop = entity.getProperty(name);
@ -57,6 +56,11 @@ public final class ValueProviderMap implements Map<String, Object> {
return null; return null;
} }
@Override
public Object get(Object key) {
return get(key, this.immutable);
}
@Override @Override
public Set<String> keySet() { public Set<String> keySet() {
return entity return entity
@ -78,7 +82,7 @@ public final class ValueProviderMap implements Map<String, Object> {
@Override @Override
public boolean containsKey(Object key) { public boolean containsKey(Object key) {
if (key instanceof Object) { if (key instanceof String) {
String s = (String) key; String s = (String) key;
return keySet().contains(s); return keySet().contains(s);
} }
@ -149,8 +153,10 @@ public final class ValueProviderMap implements Map<String, Object> {
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
if (o == null || (!o.getClass().isAssignableFrom(Map.class) && getClass() != o.getClass())) if (o == null
return false; || !((Map.class.isAssignableFrom(o.getClass())
|| ImmutableMap.class.isAssignableFrom(o.getClass()))
|| o.getClass().getSimpleName().equals("UnmodifiableMap"))) return false;
Map that = (Map) o; Map that = (Map) o;
if (this.size() != that.size()) return false; if (this.size() != that.size()) return false;

View file

@ -17,6 +17,7 @@ package net.helenus.test.integration.core;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession; import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest; import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Test; import org.junit.Test;
@ -29,4 +30,11 @@ public class ContextInitTest extends AbstractEmbeddedCassandraTest {
System.out.println("Works! " + session); System.out.println("Works! " + session);
} }
@Test
public void testWithNullSession() {
HelenusSession session = Helenus.init(null, "foo").get();
UnitOfWork uow = session.begin();
uow.abort();
}
} }

View file

@ -21,9 +21,11 @@ import java.io.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.UUID;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession; import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest; import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert; import org.junit.Assert;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -34,6 +36,8 @@ public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {
static Supply supply; static Supply supply;
static HelenusSession session; static HelenusSession session;
static Supply.Draft draft = null; static Supply.Draft draft = null;
static UUID id = null;
static String region = null;
@BeforeClass @BeforeClass
public static void beforeTest() throws TimeoutException { public static void beforeTest() throws TimeoutException {
@ -68,6 +72,8 @@ public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {
}); });
Supply s1 = session.<Supply>insert(draft).sync(); Supply s1 = session.<Supply>insert(draft).sync();
id = s1.id();
region = s1.region();
} }
@Test @Test
@ -76,7 +82,8 @@ public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {
Supply s1 = Supply s1 =
session session
.<Supply>select(Supply.class) .<Supply>select(Supply.class)
.where(supply::id, eq(draft.id())) .where(supply::id, eq(id))
.and(supply::region, eq(region))
.single() .single()
.sync() .sync()
.orElse(null); .orElse(null);
@ -87,6 +94,7 @@ public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {
.<Supply>update(s1.update()) .<Supply>update(s1.update())
.prepend(supply::suppliers, "Pignose Supply, LLC.") .prepend(supply::suppliers, "Pignose Supply, LLC.")
.sync(); .sync();
Assert.assertEquals(s2.suppliers().get(0), "Pignose Supply, LLC."); Assert.assertEquals(s2.suppliers().get(0), "Pignose Supply, LLC.");
// Set // Set
@ -99,6 +107,59 @@ public class EntityDraftBuilderTest extends AbstractEmbeddedCassandraTest {
Assert.assertEquals((long) s4.demand().get("NORAM"), 10L); Assert.assertEquals((long) s4.demand().get("NORAM"), 10L);
} }
@Test
public void testDraftMergeInNestedUow() throws Exception {
Supply s1, s2, s3, s4, s5;
Supply.Draft d1;
s1 =
session
.<Supply>select(Supply.class)
.where(supply::id, eq(id))
.and(supply::region, eq(region))
.single()
.sync()
.orElse(null);
try (UnitOfWork uow1 = session.begin()) {
s2 =
session
.<Supply>select(Supply.class)
.where(supply::id, eq(id))
.and(supply::region, eq(region))
.single()
.sync(uow1)
.orElse(null);
try (UnitOfWork uow2 = session.begin(uow1)) {
s3 =
session
.<Supply>select(Supply.class)
.where(supply::id, eq(id))
.and(supply::region, eq(region))
.single()
.sync(uow2)
.orElse(null);
d1 = s3.update().setCode("WIDGET-002-UPDATED");
s4 =
session.update(d1).usingTtl(20).defaultTimestamp(System.currentTimeMillis()).sync(uow2);
uow2.commit();
}
s5 =
session
.<Supply>select(Supply.class)
.where(supply::id, eq(id))
.and(supply::region, eq(region))
.single()
.sync(uow1)
.orElse(null);
}
}
@Test @Test
public void testSerialization() throws Exception { public void testSerialization() throws Exception {
Supply s1, s2; Supply s1, s2;

View file

@ -3,11 +3,13 @@ package net.helenus.test.integration.core.draft;
import java.util.UUID; import java.util.UUID;
import net.helenus.core.AbstractAuditedEntityDraft; import net.helenus.core.AbstractAuditedEntityDraft;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.MapExportable; import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.annotation.*; import net.helenus.mapping.annotation.*;
@Table @Table
public interface Inventory { public interface Inventory extends Entity {
static Inventory inventory = Helenus.dsl(Inventory.class); static Inventory inventory = Helenus.dsl(Inventory.class);
@ -36,7 +38,7 @@ public interface Inventory {
return new Draft(this); return new Draft(this);
} }
class Draft extends AbstractAuditedEntityDraft<Inventory> { class Draft extends AbstractAuditedEntityDraft<Inventory> implements Drafted<Inventory> {
// Entity/Draft pattern-enabling methods: // Entity/Draft pattern-enabling methods:
Draft(UUID id) { Draft(UUID id) {

View file

@ -7,11 +7,15 @@ import java.util.Set;
import java.util.UUID; import java.util.UUID;
import net.helenus.core.AbstractEntityDraft; import net.helenus.core.AbstractEntityDraft;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
import net.helenus.core.reflect.Drafted;
import net.helenus.core.reflect.Entity;
import net.helenus.core.reflect.MapExportable; import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.annotation.*; import net.helenus.mapping.annotation.*;
@Table @Table
public interface Supply { @Cacheable
public interface Supply extends Entity {
static Supply supply = Helenus.dsl(Supply.class); static Supply supply = Helenus.dsl(Supply.class);
@ -48,8 +52,7 @@ public interface Supply {
return new Draft(this); return new Draft(this);
} }
class Draft extends AbstractEntityDraft<Supply> { class Draft extends AbstractEntityDraft<Supply> implements Drafted<Supply> {
// Entity/Draft pattern-enabling methods: // Entity/Draft pattern-enabling methods:
Draft(String region) { Draft(String region) {
super(null); super(null);

View file

@ -17,13 +17,11 @@ package net.helenus.test.integration.core.simple;
import static net.helenus.core.Query.eq; import static net.helenus.core.Query.eq;
import com.datastax.driver.core.ResultSet;
import java.util.Optional; import java.util.Optional;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession; import net.helenus.core.HelenusSession;
import net.helenus.core.Operator; import net.helenus.core.Operator;
import net.helenus.core.operation.UpdateOperation;
import net.helenus.support.Fun; import net.helenus.support.Fun;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest; import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert; import org.junit.Assert;
@ -184,7 +182,6 @@ public class SimpleUserTest extends AbstractEmbeddedCassandraTest {
.set(user::age, null) .set(user::age, null)
.set(user::type, null) .set(user::type, null)
.where(user::id, eq(100L)) .where(user::id, eq(100L))
.zipkinContext(null)
.sync(); .sync();
Fun.Tuple3<String, Integer, UserType> tuple = Fun.Tuple3<String, Integer, UserType> tuple =
@ -207,18 +204,14 @@ public class SimpleUserTest extends AbstractEmbeddedCassandraTest {
Assert.assertEquals(0L, cnt); Assert.assertEquals(0L, cnt);
} }
public void testZipkin() throws TimeoutException { public void testFunTuple() throws TimeoutException {
session Fun.Tuple1<String> tf =
.update() session.select(user::name).where(user::id, eq(100L)).single().sync().orElse(null);
.set(user::name, null) if (tf != null) {
.set(user::age, null) Assert.assertEquals(Fun.class, tf.getClass().getEnclosingClass());
.set(user::type, null) String name = tf._1;
.where(user::id, eq(100L)) Assert.assertEquals("greg", name);
.zipkinContext(null) }
.sync();
UpdateOperation<ResultSet> update = session.update();
update.set(user::name, null).zipkinContext(null).sync();
} }
private void assertUsers(User expected, User actual) { private void assertUsers(User expected, User actual) {

View file

@ -90,22 +90,38 @@ public class AndThenOrderTest extends AbstractEmbeddedCassandraTest {
.andThen( .andThen(
() -> { () -> {
q.add("1"); q.add("1");
})
.orElse(
() -> {
q.add("a");
}); });
uow2 = session.begin(uow3); uow2 = session.begin(uow3);
uow2.commit() uow2.commit()
.andThen( .andThen(
() -> { () -> {
q.add("2"); q.add("2");
})
.orElse(
() -> {
q.add("b");
}); });
uow3.commit() uow3.commit()
.andThen( .andThen(
() -> { () -> {
q.add("3"); q.add("3");
})
.orElse(
() -> {
q.add("c");
}); });
uow4.commit() uow4.commit()
.andThen( .andThen(
() -> { () -> {
q.add("4"); q.add("4");
})
.orElse(
() -> {
q.add("d");
}); });
throw new Exception(); throw new Exception();
} catch (Exception e) { } catch (Exception e) {
@ -115,10 +131,15 @@ public class AndThenOrderTest extends AbstractEmbeddedCassandraTest {
.andThen( .andThen(
() -> { () -> {
q.add("5"); q.add("5");
})
.orElse(
() -> {
q.add("e");
}); });
System.out.println(q); System.out.println(q);
Assert.assertTrue(q.isEmpty() == true); Assert.assertTrue(
Arrays.equals(q.toArray(new String[5]), new String[] {"a", "b", "c", "d", "e"}));
} }
@Test @Test

View file

@ -17,14 +17,23 @@ package net.helenus.test.integration.core.unitofwork;
import static net.helenus.core.Query.eq; import static net.helenus.core.Query.eq;
import ca.exprofesso.guava.jcache.GuavaCachingProvider;
import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.utils.UUIDs; import com.datastax.driver.core.utils.UUIDs;
import java.io.Serializable;
import java.util.Date;
import java.util.UUID; import java.util.UUID;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.spi.CachingProvider;
import net.bytebuddy.utility.RandomString; import net.bytebuddy.utility.RandomString;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession; import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork; import net.helenus.core.UnitOfWork;
import net.helenus.core.annotation.Cacheable; import net.helenus.core.annotation.Cacheable;
import net.helenus.core.reflect.Entity;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.annotation.Constraints; import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.Index; import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.PartitionKey; import net.helenus.mapping.annotation.PartitionKey;
@ -36,13 +45,24 @@ import org.junit.Test;
@Table @Table
@Cacheable @Cacheable
interface Widget { interface Widget extends Entity, Serializable {
@PartitionKey @PartitionKey
UUID id(); UUID id();
@Index @Index
@Constraints.Distinct() @Constraints.Distinct
String name(); String name();
@Constraints.Distinct(value = {"b"})
String a();
String b();
@Constraints.Distinct(alone = false)
String c();
@Constraints.Distinct(combined = false)
String d();
} }
public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest { public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
@ -52,6 +72,14 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
@BeforeClass @BeforeClass
public static void beforeTest() { public static void beforeTest() {
CachingProvider cachingProvider =
Caching.getCachingProvider(GuavaCachingProvider.class.getName());
CacheManager cacheManager = cachingProvider.getCacheManager();
MutableConfiguration<String, Object> configuration = new MutableConfiguration<>();
configuration.setStoreByValue(false).setReadThrough(false);
cacheManager.createCache(
MappingUtil.getTableName(Widget.class, true).toString(), configuration);
session = session =
Helenus.init(getSession()) Helenus.init(getSession())
.showCql() .showCql()
@ -59,6 +87,7 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.autoCreateDrop() .autoCreateDrop()
.consistencyLevel(ConsistencyLevel.ONE) .consistencyLevel(ConsistencyLevel.ONE)
.idempotentQueryExecution(true) .idempotentQueryExecution(true)
.setCacheManager(cacheManager)
.get(); .get();
widget = session.dsl(Widget.class); widget = session.dsl(Widget.class);
} }
@ -74,6 +103,10 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key) .value(widget::id, key)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(); .sync();
try (UnitOfWork uow = session.begin()) { try (UnitOfWork uow = session.begin()) {
@ -107,9 +140,11 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
@Test @Test
public void testSelectAfterNestedSelect() throws Exception { public void testSelectAfterNestedSelect() throws Exception {
Widget w1, w2, w3, w4; Widget w1, w1a, w2, w3, w4;
UUID key1 = UUIDs.timeBased(); UUID key1 = UUIDs.timeBased();
UUID key2 = UUIDs.timeBased(); UUID key2 = UUIDs.timeBased();
String cacheKey1 = MappingUtil.getTableName(Widget.class, false) + "." + key1.toString();
String cacheKey2 = MappingUtil.getTableName(Widget.class, false) + "." + key2.toString();
// This should inserted Widget, and not cache it in uow1. // This should inserted Widget, and not cache it in uow1.
try (UnitOfWork uow1 = session.begin()) { try (UnitOfWork uow1 = session.begin()) {
@ -118,10 +153,27 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key1) .value(widget::id, key1)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow1); .sync(uow1);
uow1.getCache().put(cacheKey1, w1);
Assert.assertEquals(w1, uow1.getCache().get(cacheKey1));
try (UnitOfWork uow2 = session.begin(uow1)) { try (UnitOfWork uow2 = session.begin(uow1)) {
// A "SELECT * FROM widget" query does not contain enough information to fetch an item from cache.
// This will miss, until we implement a statement cache.
w1a =
session
.<Widget>selectAll(Widget.class)
.sync(uow2)
.filter(w -> w.id().equals(key1))
.findFirst()
.orElse(null);
Assert.assertTrue(w1.equals(w1a));
// This should read from uow1's cache and return the same Widget. // This should read from uow1's cache and return the same Widget.
w2 = w2 =
session session
@ -132,14 +184,21 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.orElse(null); .orElse(null);
Assert.assertEquals(w1, w2); Assert.assertEquals(w1, w2);
uow2.getCache().put(cacheKey2, w2);
w3 = w3 =
session session
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key2) .value(widget::id, key2)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow2); .sync(uow2);
Assert.assertEquals(w1, uow2.getCache().get(cacheKey1));
Assert.assertEquals(w2, uow2.getCache().get(cacheKey2));
uow2.commit() uow2.commit()
.andThen( .andThen(
() -> { () -> {
@ -151,7 +210,8 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
w4 = w4 =
session session
.<Widget>select(widget) .<Widget>select(widget)
.where(widget::id, eq(key2)) .where(widget::a, eq(w3.a()))
.and(widget::b, eq(w3.b()))
.single() .single()
.sync(uow1) .sync(uow1)
.orElse(null); .orElse(null);
@ -175,6 +235,10 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key) .value(widget::id, key)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow); .sync(uow);
// This should read from the database and return a Widget. // This should read from the database and return a Widget.
@ -209,6 +273,10 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key) .value(widget::id, key)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(); .sync();
try (UnitOfWork uow = session.begin()) { try (UnitOfWork uow = session.begin()) {
@ -219,13 +287,18 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
Assert.assertEquals(w1, w2); Assert.assertEquals(w1, w2);
// This should remove the object from the session cache. // This should remove the object from the session cache.
w3 =
session.<Widget>update(w2).set(widget::name, "Bill").where(widget::id, eq(key)).sync(uow); session.<Widget>update(w2).set(widget::name, "Bill").where(widget::id, eq(key)).sync(uow);
w3 =
session
.<Widget>update(w2)
.set(widget::name, w1.name())
.where(widget::id, eq(key))
.sync(uow);
// Fetch from session cache, should have old name. // Fetch from session cache will cache miss (as it was updated) and trigger a SELECT.
w4 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync().orElse(null); w4 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync().orElse(null);
Assert.assertEquals(w4, w2); Assert.assertEquals(w4, w2);
Assert.assertEquals(w4.name(), w1.name()); Assert.assertEquals(w4.name(), w3.name());
// This should skip the cache. // This should skip the cache.
w5 = w5 =
@ -237,15 +310,13 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.sync() .sync()
.orElse(null); .orElse(null);
Assert.assertNotEquals(w5, w2); // Not the same instance, Assert.assertTrue(w5.equals(w2));
Assert.assertTrue(w2.equals(w5)); // but they have the same values, Assert.assertTrue(w2.equals(w5));
Assert.assertFalse(w5.equals(w2)); // regardless of the order when comparing.
Assert.assertEquals(w5.name(), "Bill");
uow.commit() uow.commit()
.andThen( .andThen(
() -> { () -> {
Assert.assertEquals(w1, w2); Assert.assertEquals(w2, w3);
}); });
} }
@ -271,6 +342,10 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
.<Widget>insert(widget) .<Widget>insert(widget)
.value(widget::id, key) .value(widget::id, key)
.value(widget::name, RandomString.make(20)) .value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(); .sync();
try (UnitOfWork uow = session.begin()) { try (UnitOfWork uow = session.begin()) {
@ -279,74 +354,200 @@ public class UnitOfWorkTest extends AbstractEmbeddedCassandraTest {
w2 = w2 =
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null); session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
String cacheKey = MappingUtil.getTableName(Widget.class, false) + "." + key.toString();
uow.getCache().put(cacheKey, w1);
// This should remove the object from the cache. // This should remove the object from the cache.
session.delete(widget).where(widget::id, eq(key)).sync(uow); session.delete(widget).where(widget::id, eq(key)).sync(uow);
uow.getCache().remove(cacheKey);
// This should fail to read from the cache. // This should fail to read from the cache.
w3 = w3 =
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null); session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
Assert.assertEquals(w3, null); Assert.assertEquals(null, w3);
uow.commit() uow.commit()
.andThen( .andThen(
() -> { () -> {
Assert.assertEquals(w1, w2); Assert.assertEquals(w1, w2);
Assert.assertEquals(w3, null); Assert.assertEquals(null, w3);
}); });
} }
w4 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync().orElse(null);
Assert.assertEquals(null, w4);
}
@Test
public void testBatchingUpdatesAndInserts() throws Exception {
Widget w1, w2, w3, w4, w5, w6;
Long committedAt = 0L;
UUID key = UUIDs.timeBased();
String cacheKey = MappingUtil.getTableName(Widget.class, false) + "." + key.toString();
try (UnitOfWork uow = session.begin()) {
w1 =
session
.<Widget>upsert(widget)
.value(widget::id, key)
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.batch(uow);
Assert.assertTrue(0L == w1.writtenAt(widget::name));
Assert.assertTrue(0 == w1.ttlOf(widget::name));
uow.getCache().put(cacheKey, w1);
w2 =
session
.<Widget>update(w1)
.set(widget::name, RandomString.make(10))
.where(widget::id, eq(key))
.usingTtl(30)
.batch(uow);
Assert.assertEquals(w1, w2);
Assert.assertTrue(0L == w2.writtenAt(widget::name));
Assert.assertTrue(30 == w1.ttlOf(widget::name));
w3 =
session
.<Widget>select(Widget.class)
.where(widget::id, eq(key))
.single()
.sync(uow)
.orElse(null);
Assert.assertEquals(w2, w3);
Assert.assertTrue(0L == w3.writtenAt(widget::name));
Assert.assertTrue(30 <= w3.ttlOf(widget::name));
w6 =
session
.<Widget>upsert(widget)
.value(widget::id, UUIDs.timeBased())
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.batch(uow);
uow.getCache().put(cacheKey, w1);
uow.commit();
committedAt = uow.committedAt();
Date d = new Date(committedAt * 1000);
String date = d.toString();
}
// 'c' is distinct, but not on it's own so this should miss cache
w4 =
session
.<Widget>select(Widget.class)
.where(widget::c, eq(w6.c()))
.single()
.sync()
.orElse(null);
Assert.assertEquals(w6, w4);
//TODO(gburd): fix these.
//long at = w4.writtenAt(widget::name);
//Assert.assertTrue(at == committedAt);
//int ttl4 = w4.ttlOf(widget::name);
//Assert.assertTrue(ttl4 <= 30 && ttl4 > 0);
w5 =
session
.<Widget>select(Widget.class)
.where(widget::id, eq(w6.id()))
.uncached()
.single()
.sync()
.orElse(null);
Assert.assertTrue(w4.equals(w5));
//Assert.assertTrue(w5.writtenAt(widget::name) == committedAt);
int ttl5 = w5.ttlOf(widget::name);
Assert.assertTrue(ttl5 <= 30);
//Assert.assertTrue(w4.writtenAt(widget::name) == w6.writtenAt(widget::name));
}
@Test
public void testInsertNoOp() throws Exception {
Widget w1, w2;
UUID key1 = UUIDs.timeBased();
try (UnitOfWork uow = session.begin()) {
// This should inserted Widget, but not cache it.
w1 =
session
.<Widget>insert(widget)
.value(widget::id, key1)
.value(widget::name, RandomString.make(20))
.sync(uow);
String cacheKey = MappingUtil.getTableName(Widget.class, false) + "." + key1.toString();
uow.getCache().put(cacheKey, w1);
/*
w2 = session.<Widget>upsert(w1)
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow);
uow.commit();
*/
uow.abort();
}
//Assert.assertEquals(w1, w2);
}
@Test
public void testSelectAfterInsertProperlyCachesEntity() throws Exception {
Widget w1, w2, w3, w4;
UUID key = UUIDs.timeBased();
try (UnitOfWork uow = session.begin()) {
// This should cache the inserted Widget.
w1 =
session
.<Widget>insert(widget)
.value(widget::id, key)
.value(widget::name, RandomString.make(20))
.value(widget::a, RandomString.make(10))
.value(widget::b, RandomString.make(10))
.value(widget::c, RandomString.make(10))
.value(widget::d, RandomString.make(10))
.sync(uow);
String cacheKey = MappingUtil.getTableName(Widget.class, false) + "." + key.toString();
uow.getCache().put(cacheKey, w1);
// This should read from the cache and get the same instance of a Widget.
w2 =
session.<Widget>select(widget).where(widget::id, eq(key)).single().sync(uow).orElse(null);
uow.getCache().put(cacheKey, w1);
uow.commit()
.andThen(
() -> {
Assert.assertEquals(w1, w2);
});
}
// This should read the widget from the session cache and maintain object identity.
w3 = session.<Widget>select(widget).where(widget::id, eq(key)).single().sync().orElse(null);
Assert.assertEquals(w1, w3);
// This should read the widget from the database, no object identity but
// values should match.
w4 = w4 =
session session
.<Widget>select(widget) .<Widget>select(widget)
.where(widget::name, eq(w1.name())) .where(widget::id, eq(key))
.uncached()
.single() .single()
.sync() .sync()
.orElse(null); .orElse(null);
Assert.assertEquals(w4, null); Assert.assertFalse(w1 == w4);
Assert.assertTrue(w1.equals(w4));
Assert.assertTrue(w4.equals(w1));
} }
/*
@Test
public void testInsertNoOp() throws Exception {
Widget w1, w2;
UUID key = UUIDs.timeBased();
try (UnitOfWork uow = session.begin()) {
// This should inserted Widget, but not cache it.
w1 = session.<Widget>insert(widget).value(widget::id, key).value(widget::name, RandomString.make(20)).sync(uow);
w2 = session.<Widget>insert(w1).value(widget::id, key).sync(uow);
}
Assert.assertEquals(w1, w2);
}
*/
/*
* @Test public void testSelectAfterInsertProperlyCachesEntity() throws
* Exception { Widget w1, w2, w3, w4; UUID key = UUIDs.timeBased();
*
* try (UnitOfWork uow = session.begin()) {
*
* // This should cache the inserted Widget. w1 = session.<Widget>insert(widget)
* .value(widget::id, key) .value(widget::name, RandomString.make(20))
* .sync(uow);
*
* // This should read from the cache and get the same instance of a Widget. w2
* = session.<Widget>select(widget) .where(widget::id, eq(key)) .single()
* .sync(uow) .orElse(null);
*
* uow.commit() .andThen(() -> { Assert.assertEquals(w1, w2); }); }
*
* // This should read the widget from the session cache and maintain object
* identity. w3 = session.<Widget>select(widget) .where(widget::id, eq(key))
* .single() .sync() .orElse(null);
*
* Assert.assertEquals(w1, w3);
*
* // This should read the widget from the database, no object identity but
* values should match. w4 = session.<Widget>select(widget) .where(widget::id,
* eq(key)) .uncached() .single() .sync() .orElse(null);
*
* Assert.assertNotEquals(w1, w4); Assert.assertTrue(w1.equals(w4)); }
*/
} }

View file

@ -22,9 +22,12 @@ import java.text.SimpleDateFormat;
import java.util.Date; import java.util.Date;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import net.helenus.core.ConflictingUnitOfWorkException;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.HelenusSession; import net.helenus.core.HelenusSession;
import net.helenus.core.UnitOfWork;
import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest; import net.helenus.test.integration.build.AbstractEmbeddedCassandraTest;
import org.junit.Assert;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
@ -78,6 +81,34 @@ public class MaterializedViewTest extends AbstractEmbeddedCassandraTest {
.from(CyclistsByAge.class) .from(CyclistsByAge.class)
.where(cyclist::age, eq(18)) .where(cyclist::age, eq(18))
.allowFiltering() .allowFiltering()
.single()
.sync(); .sync();
} }
@Test
public void testMvUnitOfWork()
throws TimeoutException, ConflictingUnitOfWorkException, Exception {
Cyclist c1, c2;
UnitOfWork uow = session.begin();
c1 =
session
.<Cyclist>select(Cyclist.class)
.from(CyclistsByAge.class)
.where(cyclist::age, eq(18))
.single()
.sync(uow)
.orElse(null);
c2 =
session
.<Cyclist>select(Cyclist.class)
.from(CyclistsByAge.class)
.where(cyclist::age, eq(18))
.single()
.sync(uow)
.orElse(null);
Assert.assertEquals(c1, c2);
uow.commit();
}
} }

View file

@ -39,7 +39,7 @@ public interface Account {
return new Draft(); return new Draft();
} }
class Draft implements Drafted { // TODO class Draft implements Drafted<Account> {
@Override @Override
public Set<String> mutated() { public Set<String> mutated() {
@ -47,7 +47,12 @@ public interface Account {
} }
@Override @Override
public Object build() { public Account build() {
return null;
}
@Override
public Set<String> read() {
return null; return null;
} }