Helenus was the son of King Priam and Queen Hecuba of Troy, and the twin brother of the prophetess Cassandra. Like Cassandra, he was always right, but unlike her, others believed him. That makes it a fitting name for a layer that provides access to the DataStax, Apache, and ScyllaDB implementations of the Cassandra database.

- move to 3.x DataStax drivers (NOTE, this broke the Casser.dsl() API a bit)
 - introduce UnitOfWork-style pseudo transactional scoping (WIP as of this commit)
 - introduce session and UnitOfWork scoped caches (WIP as of this commit)
 - add support for SASI-based secondary index types (such as case insensitive matching, etc.)
 - add support for Lucene-based secondary index types (WIP https://github.com/Stratio/cassandra-lucene-index)
 - update types for Java 8 (WIP)
 - add in Codahale/Dropwizard and Zipkin metrics/tracing support (WIP)
 - and a lot more...
This commit is contained in:
Greg Burd 2017-07-17 09:42:00 -04:00
parent 13a2f5bffa
commit 715fb0e673
278 changed files with 8690 additions and 8113 deletions

1
.idea/.name Normal file
View file

@ -0,0 +1 @@
helenus-core

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ project setting: delegate Java formatting to the Eclipse code
     formatter, using the shared Eclipse preferences file referenced below.
     NOTE(review): the path points outside this repository ($PROJECT_DIR$/../newton/...) —
     confirm that sibling checkout exists for everyone who builds this project. -->
<project version="4">
<component name="EclipseCodeFormatterProjectSettings">
<option name="projectSpecificProfile">
<ProjectSpecificProfile>
<option name="formatter" value="ECLIPSE" />
<option name="pathToConfigFileJava" value="$PROJECT_DIR$/../newton/formatting/onshape-eclipse-general-preferences.epf" />
</ProjectSpecificProfile>
</option>
</component>
</project>

6
.idea/encodings.xml Normal file
View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ project setting: force UTF-8 as the file encoding for the whole
     project directory so source files are read/written consistently. -->
<project version="4">
<component name="Encoding">
<file url="file://$PROJECT_DIR$" charset="UTF-8" />
</component>
</project>

3
AUTHORS Normal file
View file

@ -0,0 +1,3 @@
* Gregory Burd <gburd@onshape.com> <greg@burd.me> @gregburd github:gburd keybase:gregburd
* Alex Shvid <a@shvid.com>

View file

@ -1,4 +1,4 @@
# casser
# helenus
Fast and easy, functional style cutting edge Java 8 and Scala 2.11 Cassandra client
Current status: First application in production (may be more)
@ -26,8 +26,8 @@ Latest release dependency:
```
<dependencies>
<dependency>
<groupId>com.noorq.casser</groupId>
<artifactId>casser-core</artifactId>
<groupId>net.helenus</groupId>
<artifactId>helenus-core</artifactId>
<version>1.1.0_2.11</version>
</dependency>
</dependencies>
@ -37,8 +37,8 @@ Active development dependency for Scala 2.11:
```
<dependencies>
<dependency>
<groupId>com.noorq.casser</groupId>
<artifactId>casser-core</artifactId>
<groupId>net.helenus</groupId>
<artifactId>helenus-core</artifactId>
<version>1.2.0_2.11-SNAPSHOT</version>
</dependency>
</dependencies>
@ -77,8 +77,8 @@ public interface Timeline {
Session initialization:
```
Timeline timeline = Casser.dsl(Timeline.class);
CasserSession session = Casser.init(getSession()).showCql().add(Timeline.class).autoCreateDrop().get();
Timeline timeline = Helenus.dsl(Timeline.class);
HelenusSession session = Helenus.init(getSession()).showCql().add(Timeline.class).autoCreateDrop().get();
```
Select example:
@ -138,7 +138,7 @@ Abstract repository:
```
public interface AbstractRepository {
CasserSession session();
HelenusSession session();
}
```
@ -149,7 +149,7 @@ import scala.concurrent.Future;
public interface AccountRepository extends AbstractRepository {
static final Account account = Casser.dsl(Account.class);
static final Account account = Helenus.dsl(Account.class);
static final String DEFAULT_TIMEZONE = "America/Los_Angeles";

99
helenus-core.iml Normal file
View file

@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.13.0-M1" level="project" />
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.3.0" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-common:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-transport:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-codec:4.0.47.Final" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-ffi:2.0.7" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jffi:1.2.10" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: com.github.jnr:jffi:native:1.2.10" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm:5.0.3" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm-commons:5.0.3" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm-analysis:5.0.3" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm-tree:5.0.3" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm-util:5.0.3" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.8.10" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.6" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:4.3.10.RELEASE" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:16.0.1" level="project" />
<orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.0.CR3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.cassandraunit:cassandra-unit:3.1.3.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.cassandra:cassandra-all:3.11.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.xerial.snappy:snappy-java:1.1.1.7" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.jpountz.lz4:lz4:1.3.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.ning:compress-lzf:0.8.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: commons-cli:commons-cli:1.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: commons-codec:commons-codec:1.9" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.commons:commons-math3:3.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.antlr:antlr:3.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.antlr:ST4:4.0.8" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.antlr:antlr-runtime:3.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.slf4j:log4j-over-slf4j:1.7.7" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.googlecode.json-simple:json-simple:1.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.boundary:high-scale-lib:1.0.6" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.yaml:snakeyaml:1.11" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mindrot:jbcrypt:0.3m" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: io.dropwizard.metrics:metrics-jvm:3.1.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.addthis.metrics:reporter-config3:3.0.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.addthis.metrics:reporter-config-base:3.0.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hibernate:hibernate-validator:4.3.0.Final" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.jboss.logging:jboss-logging:3.1.0.CR2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.thinkaurelius.thrift:thrift-server:0.3.7" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.lmax:disruptor:3.0.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.clearspring.analytics:stream:2.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: it.unimi.dsi:fastutil:6.5.7" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.thrift:libthrift:0.9.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.httpcomponents:httpclient:4.2.5" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.httpcomponents:httpcore:4.2.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.cassandra:cassandra-thrift:3.11.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.carrotsearch:hppc:0.5.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: de.jflex:jflex:1.6.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.ant:ant:1.7.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.apache.ant:ant-launcher:1.7.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.rholder:snowball-stemmer:1.3.0.581.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.googlecode.concurrent-trees:concurrent-trees:2.4.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.java.dev.jna:jna:4.4.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.jbellis:jamm:0.3.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: io.netty:netty-all:4.0.44.Final" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: joda-time:joda-time:2.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.fusesource:sigar:1.6.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.eclipse.jdt.core.compiler:ecj:4.4.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.caffinitas.ohc:ohc-core:0.4.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.ben-manes.caffeine:caffeine:2.2.6" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.jctools:jctools-core:1.2.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: commons-io:commons-io:2.5" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.github.stephenc:jamm:0.2.5" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.8.47" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy:1.6.14" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy-agent:1.6.14" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.objenesis:objenesis:2.5" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.1" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.slf4j:jcl-over-slf4j:1.7.1" level="project" />
</component>
</module>

736
pom.xml
View file

@ -1,410 +1,418 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.noorq.casser</groupId>
<artifactId>casser-core</artifactId>
<version>1.2.0_2.11-SNAPSHOT</version>
<packaging>jar</packaging>
<modelVersion>4.0.0</modelVersion>
<groupId>net.helenus</groupId>
<artifactId>helenus-core</artifactId>
<version>2.0.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>casser</name>
<description>Casser Cassandra Client</description>
<url>https://github.com/noorq/casser</url>
<name>helenus</name>
<description>Helenus Cassandra Client</description>
<url>https://helenus.net/</url>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>https://github.com/noorq/casser</url>
<connection>scm:git:git@github.com:noorq/casser.git</connection>
<developerConnection>scm:git:git@github.com:noorq/casser.git</developerConnection>
</scm>
<scm>
<url>https://github.net.helenus</url>
<connection>scm:git:git@github.com:gburd/helenus.git</connection>
<developerConnection>scm:git:git@github.com:gburd/helenus.git</developerConnection>
</scm>
<issueManagement>
<system>GitHub</system>
<url>https://github.com/noorq/casser/issues</url>
</issueManagement>
<issueManagement>
<system>GitHub</system>
<url>https://github.com/gburd/helenus/issues</url>
</issueManagement>
<parent>
<groupId>org.sonatype.oss</groupId>
<artifactId>oss-parent</artifactId>
<version>7</version>
</parent>
<parent>
<groupId>org.sonatype.oss</groupId>
<artifactId>oss-parent</artifactId>
<version>7</version>
</parent>
<properties>
<dist.id>casser</dist.id>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cassandra-unit.version>2.0.2.2</cassandra-unit.version>
<cassandra-driver-core.version>2.1.5</cassandra-driver-core.version>
<cassandra>2.1.4</cassandra>
<guava.version>16.0.1</guava.version>
<hamcrest>1.3</hamcrest>
<jodatime>2.1</jodatime>
<junit>4.11</junit>
<jamm>0.2.5</jamm>
<slf4j>1.7.1</slf4j>
<logback>1.0.11</logback>
<mockito>1.9.5</mockito>
<jackson>1.9.13</jackson>
</properties>
<properties>
<dist.id>helenus</dist.id>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<profiles>
<profile>
<id>release</id>
<profile>
<id>release</id>
<build>
<build>
<plugins>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.6</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.6</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</plugins>
</build>
</build>
</profile>
</profile>
</profiles>
<dependencies>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.11.6</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.13.0-M1</version>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>${cassandra-driver-core.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>3.3.0</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.8.10</version>
</dependency>
<!-- Validation -->
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>1.1.0.Final</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.8.10</version>
</dependency>
<!-- TESTS -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.6</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${jackson}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>4.3.10.RELEASE</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>${jackson}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>16.0.1</version>
</dependency>
<dependency>
<groupId>org.cassandraunit</groupId>
<artifactId>cassandra-unit</artifactId>
<version>${cassandra-unit.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Validation -->
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>2.0.0.CR3</version>
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-all</artifactId>
<version>${cassandra}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- TESTS -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>1.9.13</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.9.13</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.stephenc</groupId>
<artifactId>jamm</artifactId>
<version>${jamm}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.cassandraunit</groupId>
<artifactId>cassandra-unit</artifactId>
<version>3.1.3.2</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>${hamcrest}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-all</artifactId>
<version>3.11.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<version>${hamcrest}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j}</version>
</dependency>
<dependency>
<groupId>com.github.stephenc</groupId>
<artifactId>jamm</artifactId>
<version>0.2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<version>${slf4j}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
</dependencies>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
<build>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>2.8.47</version>
<scope>test</scope>
</dependency>
<testResources>
<testResource>
<directory>src/test/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/*</include>
</includes>
</testResource>
</testResources>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.1</version>
</dependency>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<testSource>1.8</testSource>
<testTarget>1.8</testTarget>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9.1</version>
<configuration>
<aggregate>true</aggregate>
<author>true</author>
<bottom>true</bottom>
<destDir>target/javadoc</destDir>
<packagenames>casser.*</packagenames>
<use>true</use>
<version>true</version>
</configuration>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.8</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>true</downloadJavadocs>
<wtpversion>2.0</wtpversion>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.8</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.8</version>
<executions>
<execution>
<id>reserve-network-port</id>
<goals>
<goal>reserve-network-port</goal>
</goals>
<phase>process-resources</phase>
<configuration>
<portNames>
<portName>build.cassandra.native_transport_port</portName>
<portName>build.cassandra.rpc_port</portName>
<portName>build.cassandra.storage_port</portName>
<portName>build.cassandra.ssl_storage_port</portName>
</portNames>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<parallel>methods</parallel>
<threadCount>10</threadCount>
<useFile>false</useFile>
<includes>
<include>**/test/unit/**/*.java</include>
</includes>
<excludes>
<exclude>**/test/integration/**/*.java</exclude>
<exclude>**/test/performance/**/*.java</exclude>
</excludes>
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<groupId>org.bitstrings.maven.plugins</groupId>
<artifactId>dependencypath-maven-plugin</artifactId>
<version>1.1.1</version>
<executions>
<execution>
<id>set-all</id>
<goals>
<goal>set</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.16</version>
<configuration>
<forkCount>1</forkCount>
<argLine>-Xmx1024m -Xss512m
-javaagent:${com.github.stephenc:jamm:jar}</argLine>
<reuseForks>true</reuseForks>
<useFile>false</useFile>
<includes>
<include>**/test/integration/**/*.java</include>
</includes>
<excludes>
<exclude>**/test/unit/**/*.java</exclude>
<exclude>**/test/performance/**/*.java</exclude>
</excludes>
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
<maven.integration.test>true</maven.integration.test>
</systemPropertyVariables>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<version>1.7.1</version>
<scope>runtime</scope>
</dependency>
</plugins>
</build>
</dependencies>
<build>
<testResources>
<testResource>
<directory>src/test/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/*</include>
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<testSource>1.8</testSource>
<testTarget>1.8</testTarget>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9.1</version>
<configuration>
<aggregate>true</aggregate>
<author>true</author>
<bottom>true</bottom>
<destDir>target/javadoc</destDir>
<use>true</use>
<version>true</version>
</configuration>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.8</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>true</downloadJavadocs>
<wtpversion>2.0</wtpversion>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.8</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.8</version>
<executions>
<execution>
<id>reserve-network-port</id>
<goals>
<goal>reserve-network-port</goal>
</goals>
<phase>process-resources</phase>
<configuration>
<portNames>
<portName>build.cassandra.native_transport_port</portName>
<portName>build.cassandra.rpc_port</portName>
<portName>build.cassandra.storage_port</portName>
<portName>build.cassandra.ssl_storage_port</portName>
</portNames>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<parallel>methods</parallel>
<threadCount>10</threadCount>
<useFile>false</useFile>
<includes>
<include>**/test/unit/**/*.java</include>
</includes>
<excludes>
<exclude>**/test/integration/**/*.java</exclude>
<exclude>**/test/performance/**/*.java</exclude>
</excludes>
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties
</java.util.logging.config.file>
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<groupId>org.bitstrings.maven.plugins</groupId>
<artifactId>dependencypath-maven-plugin</artifactId>
<version>1.1.1</version>
<executions>
<execution>
<id>set-all</id>
<goals>
<goal>set</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.16</version>
<configuration>
<forkCount>1</forkCount>
<!-- argLine>-Xmx1024m -Xss512m -javaagent:${com.github.stephenc:jamm:jar}</argLine -->
<reuseForks>true</reuseForks>
<useFile>false</useFile>
<includes>
<include>**/test/integration/**/*.java</include>
</includes>
<excludes>
<exclude>**/test/unit/**/*.java</exclude>
<exclude>**/test/performance/**/*.java</exclude>
</excludes>
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties
</java.util.logging.config.file>
<maven.integration.test>true</maven.integration.test>
</systemPropertyVariables>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View file

@ -0,0 +1,22 @@
package com.datastax.driver.core;
import java.util.Arrays;
import java.util.List;
/**
 * A {@link Metadata} subclass that can be instantiated without a live cluster.
 *
 * <p>Lives in the {@code com.datastax.driver.core} package so it can reach the
 * package-private {@code Metadata(Cluster.Manager)} constructor and the
 * package-private {@code TupleType} constructor. Useful for building
 * {@link TupleType} instances when no session/cluster is available.
 */
public class DefaultMetadata extends Metadata {

  /** Creates a metadata instance that is not attached to any cluster. */
  public DefaultMetadata() {
    super(null);
  }

  private DefaultMetadata(Cluster.Manager cluster) {
    super(cluster);
  }

  /**
   * Builds a tuple type from the given element types.
   *
   * @param types the element types, in declaration order.
   * @return a new {@link TupleType} using the newest supported protocol version
   *     and the default codec registry.
   */
  public TupleType newTupleType(DataType... types) {
    return newTupleType(Arrays.asList(types));
  }

  /**
   * Builds a tuple type from the given element types.
   *
   * @param types the element types, in declaration order.
   * @return a new {@link TupleType} using the newest supported protocol version
   *     and the default codec registry.
   */
  public TupleType newTupleType(List<DataType> types) {
    return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
  }
}

View file

@ -0,0 +1,132 @@
package com.datastax.driver.core.schemabuilder;
import com.google.common.base.Optional;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.STATEMENT_START;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotEmpty;
import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotKeyWord;
/**
 * Builds a {@code CREATE CUSTOM INDEX ... USING '<class>' WITH OPTIONS = {...}}
 * statement. Subclasses supply the index implementation class via
 * {@link #getCustomClassName()} and its options via {@link #getOptions()}
 * (see e.g. the SASI subclass). Mirrors the driver's {@code CreateIndex}
 * fluent API so it can be used interchangeably.
 */
public class CreateCustomIndex extends CreateIndex {

    private String indexName;
    private boolean ifNotExists = false;
    // Absent when the statement targets the session's current keyspace.
    private Optional<String> keyspaceName = Optional.absent();
    private String tableName;
    private String columnName;
    // When true, the index is created on KEYS(column) — for map columns.
    private boolean keys;

    CreateCustomIndex(String indexName) {
        super(indexName);
        validateNotEmpty(indexName, "Index name");
        validateNotKeyWord(indexName, String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
        this.indexName = indexName;
    }

    /**
     * Add the 'IF NOT EXISTS' condition to this CREATE INDEX statement.
     *
     * @return this CREATE INDEX statement.
     */
    public CreateIndex ifNotExists() {
        this.ifNotExists = true;
        return this;
    }

    /**
     * Specify the keyspace and table to create the index on.
     *
     * @param keyspaceName the keyspace name.
     * @param tableName the table name.
     * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
     */
    public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
        validateNotEmpty(keyspaceName, "Keyspace name");
        validateNotEmpty(tableName, "Table name");
        validateNotKeyWord(keyspaceName, String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
        validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
        this.keyspaceName = Optional.fromNullable(keyspaceName);
        this.tableName = tableName;
        return new CreateCustomIndex.CreateIndexOn();
    }

    /**
     * Specify the table to create the index on.
     *
     * @param tableName the table name.
     * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
     */
    public CreateIndex.CreateIndexOn onTable(String tableName) {
        validateNotEmpty(tableName, "Table name");
        validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
        this.tableName = tableName;
        return new CreateCustomIndex.CreateIndexOn();
    }

    /** Terminal step of the fluent API: picks the indexed column and builds the statement. */
    public class CreateIndexOn extends CreateIndex.CreateIndexOn {
        /**
         * Specify the column to create the index on.
         *
         * @param columnName the column name.
         * @return the final CREATE INDEX statement.
         */
        public SchemaStatement andColumn(String columnName) {
            validateNotEmpty(columnName, "Column name");
            validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
            CreateCustomIndex.this.columnName = columnName;
            return SchemaStatement.fromQueryString(buildInternal());
        }

        /**
         * Create an index on the keys of the given map column.
         *
         * @param columnName the column name.
         * @return the final CREATE INDEX statement.
         */
        public SchemaStatement andKeysOfColumn(String columnName) {
            validateNotEmpty(columnName, "Column name");
            validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
            CreateCustomIndex.this.columnName = columnName;
            CreateCustomIndex.this.keys = true;
            return SchemaStatement.fromQueryString(buildInternal());
        }
    }

    // Hook: fully-qualified index implementation class; empty in this base class,
    // subclasses are expected to override.
    String getCustomClassName() { return ""; }

    // Hook: contents of the WITH OPTIONS = { ... } map; empty in this base class.
    String getOptions() { return ""; }

    /**
     * Assembles the final CQL text, e.g.
     * {@code CREATE CUSTOM INDEX [IF NOT EXISTS] name ON [ks.]table(KEYS(col)) USING 'cls' WITH OPTIONS = {...}}.
     * NOTE(review): USING/WITH OPTIONS are emitted unconditionally, so using this
     * base class directly (empty class name/options) yields invalid CQL — it is
     * only meaningful via subclasses that override the two hooks above.
     */
    @Override
    public String buildInternal() {
        StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
        if (ifNotExists) {
            createStatement.append("IF NOT EXISTS ");
        }
        createStatement.append(indexName).append(" ON ");
        if (keyspaceName.isPresent()) {
            createStatement.append(keyspaceName.get()).append(".");
        }
        createStatement.append(tableName);
        createStatement.append("(");
        if (keys) {
            createStatement.append("KEYS(");
        }
        createStatement.append(columnName);
        if (keys) {
            createStatement.append(")");
        }
        createStatement.append(")");
        createStatement.append(" USING '");
        createStatement.append(getCustomClassName());
        createStatement.append("' WITH OPTIONS = {");
        createStatement.append(getOptions());
        createStatement.append(" }");
        return createStatement.toString();
    }
}

View file

@ -0,0 +1,18 @@
package com.datastax.driver.core.schemabuilder;
/**
 * A CREATE CUSTOM INDEX statement backed by Cassandra's SASI index
 * implementation, configured with a non-tokenizing, case-insensitive analyzer.
 */
public class CreateSasiIndex extends CreateCustomIndex {

    public CreateSasiIndex(String indexName) {
        super(indexName);
    }

    /** Fully-qualified class name of the SASI index implementation. */
    String getCustomClassName() {
        return "org.apache.cassandra.index.sasi.SASIIndex";
    }

    /** Index options: non-tokenizing analyzer with case-insensitive matching. */
    String getOptions() {
        final StringBuilder options = new StringBuilder();
        options.append("'analyzer_class': ");
        options.append("'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer'");
        options.append(", 'case_sensitive': 'false'");
        return options.toString();
    }
}

View file

@ -0,0 +1,40 @@
/*
* Copyright (C) 2012-2017 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core.schemabuilder;
import com.datastax.driver.core.CodecRegistry;
/**
* A built CREATE TABLE statement.
*/
public class CreateTable extends Create {
public CreateTable(String keyspaceName, String tableName) {
super(keyspaceName, tableName);
}
public CreateTable(String tableName) {
super(tableName);
}
public String getQueryString(CodecRegistry codecRegistry) {
return buildInternal();
}
public String toString() {
return buildInternal();
}
}

View file

@ -1,173 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.noorq.casser.config.CasserSettings;
import com.noorq.casser.config.DefaultCasserSettings;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.core.reflect.DslExportable;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.support.CasserMappingException;
/**
 * Static entry point for the library: holds global settings, the current
 * session, and a cache of generated DSL proxy instances keyed by mapping
 * interface. All state is process-global; {@link #shutdown()} resets it.
 */
public final class Casser {

    // Replaceable global configuration; volatile so settings swaps are visible
    // across threads without locking.
    private static volatile CasserSettings settings = new DefaultCasserSettings();

    // Cache of root (parent-less) DSL instances, one per mapping interface.
    private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();

    private static volatile CasserSession session;

    // Utility class; not instantiable.
    private Casser() {
    }

    /** Returns the current session, failing fast if none has been initialized. */
    public static CasserSession session() {
        return Objects.requireNonNull(session, "session is not initialized");
    }

    // Package-visible hook used by session bootstrap to publish the session.
    protected static void setSession(CasserSession newSession) {
        session = newSession;
    }

    /** Closes the current session (if any) and clears all cached DSL instances. */
    public static void shutdown() {
        if (session != null) {
            session.close();
        }
        session = null;
        dslCache.clear();
    }

    public static CasserSettings settings() {
        return settings;
    }

    /** Swaps the global settings, returning the previous value. */
    public static CasserSettings settings(CasserSettings overrideSettings) {
        CasserSettings old = settings;
        settings = overrideSettings;
        return old;
    }

    /** Connects to the cluster's default keyspace and starts session initialization. */
    public static SessionInitializer connect(Cluster cluster) {
        Session session = cluster.connect();
        return new SessionInitializer(session);
    }

    /** Connects to the given keyspace and starts session initialization. */
    public static SessionInitializer connect(Cluster cluster, String keyspace) {
        Session session = cluster.connect(keyspace);
        return new SessionInitializer(session);
    }

    /**
     * Starts session initialization on an externally-managed driver session.
     *
     * @throws IllegalArgumentException if {@code session} is null
     */
    public static SessionInitializer init(Session session) {

        if (session == null) {
            throw new IllegalArgumentException("empty session");
        }

        return new SessionInitializer(session);
    }

    public static void clearDslCache() {
        dslCache.clear();
    }

    public static <E> E dsl(Class<E> iface) {
        return dsl(iface, iface.getClassLoader(), Optional.empty());
    }

    public static <E> E dsl(Class<E> iface, ClassLoader classLoader) {
        return dsl(iface, classLoader, Optional.empty());
    }

    /**
     * Returns the DSL proxy for {@code iface}. Only parent-less (root) DSL
     * instances are cached; child nodes carry a parent and are built fresh.
     */
    public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent) {

        Object instance = null;

        if (!parent.isPresent()) {
            instance = dslCache.get(iface);
        }

        if (instance == null) {

            instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent);

            if (!parent.isPresent()) {

                // get/putIfAbsent rather than computeIfAbsent: benign race —
                // concurrent callers may instantiate twice, but the first cached
                // instance always wins and is returned to everyone.
                Object c = dslCache.putIfAbsent(iface, instance);
                if (c != null) {
                    instance = c;
                }
            }
        }

        return (E) instance;
    }

    public static <E> E map(Class<E> iface, Map<String, Object> src) {
        return map(iface, src, iface.getClassLoader());
    }

    /** Materializes a mapping-interface view over a column-name-to-value map. */
    public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
        return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
    }

    /** Resolves the mapping metadata for an interface by building (or fetching) its DSL. */
    public static CasserEntity entity(Class<?> iface) {
        Object dsl = dsl(iface);

        DslExportable e = (DslExportable) dsl;

        return e.getCasserMappingEntity();
    }

    /**
     * Resolves mapping metadata from either a DSL proxy instance or a raw
     * mapping interface class.
     *
     * @throws CasserMappingException if the argument is null, a non-interface
     *         class, or neither a DSL object nor a Class
     */
    public static CasserEntity resolve(Object ifaceOrDsl) {

        if (ifaceOrDsl == null) {
            throw new CasserMappingException("ifaceOrDsl is null");
        }

        if (ifaceOrDsl instanceof DslExportable) {

            DslExportable e = (DslExportable) ifaceOrDsl;

            return e.getCasserMappingEntity();
        }

        if (ifaceOrDsl instanceof Class) {

            Class<?> iface = (Class<?>) ifaceOrDsl;

            if (!iface.isInterface()) {
                throw new CasserMappingException("class is not an interface " + iface);
            }

            return entity(iface);
        }

        throw new CasserMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
    }
}

View file

@ -1,238 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
import java.util.function.Function;
import com.datastax.driver.core.Row;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.value.ColumnValueProvider;
import com.noorq.casser.support.Fun;
/**
 * Row-to-tuple mapping functions. MapperN converts a driver {@link Row} into a
 * {@code Fun.TupleN}, reading column i (0-based positional index) through the
 * {@link ColumnValueProvider} using property i's metadata for conversion.
 * Column order must therefore match the property order given at construction.
 */
public final class Mappers {

    // Utility holder; not instantiable.
    private Mappers() {
    }

    /** Maps the first column of a row into a 1-tuple. */
    public final static class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1;

        public Mapper1(ColumnValueProvider provider, CasserPropertyNode p1) {
            this.provider = provider;
            this.p1 = p1.getProperty();
        }

        @Override
        public Fun.Tuple1<A> apply(Row row) {
            return new Fun.Tuple1<A>(provider.getColumnValue(row, 0, p1));
        }
    }

    /** Maps the first two columns of a row into a 2-tuple. */
    public final static class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1;
        private final CasserProperty p2;

        public Mapper2(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
        }

        @Override
        public Fun.Tuple2<A, B> apply(Row row) {
            return new Fun.Tuple2<A, B>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2));
        }
    }

    /** Maps the first three columns of a row into a 3-tuple. */
    public final static class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1;
        private final CasserProperty p2;
        private final CasserProperty p3;

        public Mapper3(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2,
                CasserPropertyNode p3) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
        }

        @Override
        public Fun.Tuple3<A, B, C> apply(Row row) {
            return new Fun.Tuple3<A, B, C>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3)
                    );
        }
    }

    /** Maps the first four columns of a row into a 4-tuple. */
    public final static class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1;
        private final CasserProperty p2;
        private final CasserProperty p3;
        private final CasserProperty p4;

        public Mapper4(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2,
                CasserPropertyNode p3,
                CasserPropertyNode p4
                ) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
        }

        @Override
        public Fun.Tuple4<A, B, C, D> apply(Row row) {
            return new Fun.Tuple4<A, B, C, D>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4)
                    );
        }
    }

    /** Maps the first five columns of a row into a 5-tuple. */
    public final static class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1, p2, p3, p4, p5;

        public Mapper5(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2,
                CasserPropertyNode p3,
                CasserPropertyNode p4,
                CasserPropertyNode p5
                ) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
        }

        @Override
        public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
            return new Fun.Tuple5<A, B, C, D, E>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4),
                    provider.getColumnValue(row, 4, p5)
                    );
        }
    }

    /** Maps the first six columns of a row into a 6-tuple. */
    public final static class Mapper6<A, B, C, D, E, F> implements
            Function<Row,
            Fun.Tuple6<A, B, C, D, E, F>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1, p2, p3, p4, p5, p6;

        public Mapper6(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2,
                CasserPropertyNode p3,
                CasserPropertyNode p4,
                CasserPropertyNode p5,
                CasserPropertyNode p6
                ) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
            this.p6 = p6.getProperty();
        }

        @Override
        public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
            return new Fun.Tuple6<A, B, C, D, E, F>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4),
                    provider.getColumnValue(row, 4, p5),
                    provider.getColumnValue(row, 5, p6)
                    );
        }
    }

    /** Maps the first seven columns of a row into a 7-tuple. */
    public final static class Mapper7<A, B, C, D, E, F, G> implements
            Function<Row,
            Fun.Tuple7<A, B, C, D, E, F, G>> {

        private final ColumnValueProvider provider;
        private final CasserProperty p1, p2, p3, p4, p5, p6, p7;

        public Mapper7(ColumnValueProvider provider,
                CasserPropertyNode p1,
                CasserPropertyNode p2,
                CasserPropertyNode p3,
                CasserPropertyNode p4,
                CasserPropertyNode p5,
                CasserPropertyNode p6,
                CasserPropertyNode p7
                ) {
            this.provider = provider;
            this.p1 = p1.getProperty();
            this.p2 = p2.getProperty();
            this.p3 = p3.getProperty();
            this.p4 = p4.getProperty();
            this.p5 = p5.getProperty();
            this.p6 = p6.getProperty();
            this.p7 = p7.getProperty();
        }

        @Override
        public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
            return new Fun.Tuple7<A, B, C, D, E, F, G>(
                    provider.getColumnValue(row, 0, p1),
                    provider.getColumnValue(row, 1, p2),
                    provider.getColumnValue(row, 2, p3),
                    provider.getColumnValue(row, 3, p4),
                    provider.getColumnValue(row, 4, p5),
                    provider.getColumnValue(row, 5, p6),
                    provider.getColumnValue(row, 6, p7)
                    );
        }
    }
}

View file

@ -1,104 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
import java.util.Arrays;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.value.ColumnValuePreparer;
import com.noorq.casser.support.CasserMappingException;
/**
 * A filter predicate: an operator plus its operand value(s). Converts itself
 * into a driver {@link Clause} for query building; IN uses every value, all
 * other operators use only the first.
 */
public final class Postulate<V> {

    private final Operator operator;
    private final V[] values;

    protected Postulate(Operator op, V[] values) {
        this.operator = op;
        this.values = values;
    }

    /** Static factory mirroring the protected constructor. */
    public static <V> Postulate<V> of(Operator op, V... values) {
        return new Postulate<V>(op, values);
    }

    /**
     * Builds the driver clause for this postulate against the given column,
     * preparing each operand through the value preparer.
     *
     * @throws CasserMappingException if the operator is not supported here
     */
    public Clause getClause(CasserPropertyNode node, ColumnValuePreparer valuePreparer) {

        switch (operator) {

            case EQ:
                return QueryBuilder.eq(node.getColumnName(), firstPrepared(node, valuePreparer));

            case IN:
                // IN prepares every operand, preserving order.
                final Object[] preparedValues = new Object[values.length];
                for (int i = 0; i < values.length; ++i) {
                    preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
                }
                return QueryBuilder.in(node.getColumnName(), preparedValues);

            case LT:
                return QueryBuilder.lt(node.getColumnName(), firstPrepared(node, valuePreparer));

            case LTE:
                return QueryBuilder.lte(node.getColumnName(), firstPrepared(node, valuePreparer));

            case GT:
                return QueryBuilder.gt(node.getColumnName(), firstPrepared(node, valuePreparer));

            case GTE:
                return QueryBuilder.gte(node.getColumnName(), firstPrepared(node, valuePreparer));

            default:
                throw new CasserMappingException("unknown filter operation " + operator);
        }
    }

    // Prepares the single operand used by the comparison operators.
    private Object firstPrepared(CasserPropertyNode node, ColumnValuePreparer valuePreparer) {
        return valuePreparer.prepareColumnValue(values[0], node.getProperty());
    }

    @Override
    public String toString() {

        if (operator != Operator.IN) {
            return operator.getName() + values[0];
        }

        if (values == null) {
            return "in()";
        }

        final StringBuilder out = new StringBuilder("in(");
        for (int i = 0; i < values.length; i++) {
            if (i > 0) {
                out.append(", ");
            }
            out.append(String.valueOf(values[i]));
        }
        return out.append(')').toString();
    }
}

View file

@ -1,378 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import com.datastax.driver.core.ColumnMetadata;
import com.datastax.driver.core.ColumnMetadata.IndexMetadata;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.SimpleStatement;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.Alter;
import com.datastax.driver.core.schemabuilder.Create;
import com.datastax.driver.core.schemabuilder.Create.Options;
import com.datastax.driver.core.schemabuilder.CreateType;
import com.datastax.driver.core.schemabuilder.SchemaBuilder;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserEntityType;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.ColumnType;
import com.noorq.casser.mapping.OrderingDirection;
import com.noorq.casser.mapping.type.OptionalColumnMetadata;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.CqlUtil;
/**
 * Static helpers that translate mapping metadata ({@link CasserEntity} /
 * {@link CasserProperty}) into driver schema statements: CREATE/ALTER/DROP for
 * user-defined types, tables and secondary indexes.
 */
public final class SchemaUtil {

    // Utility class; not instantiable.
    private SchemaUtil() {
    }

    /**
     * Builds a {@code USE <keyspace>} statement, force-quoting the keyspace
     * name when requested.
     */
    public static RegularStatement use(String keyspace, boolean forceQuote) {
        if (forceQuote) {
            // BUGFIX: was "USE" + ... which rendered as e.g. USE"ks" — the
            // keyword and quoted keyspace need a separating space.
            return new SimpleStatement("USE " + CqlUtil.forceQuote(keyspace));
        }
        else {
            return new SimpleStatement("USE " + keyspace);
        }
    }

    /**
     * Builds a CREATE TYPE statement for a UDT entity.
     *
     * @throws CasserMappingException if the entity is not a UDT or a column
     *         name is invalid
     */
    public static SchemaStatement createUserType(CasserEntity entity) {

        if (entity.getType() != CasserEntityType.UDT) {
            throw new CasserMappingException("expected UDT entity " + entity);
        }

        CreateType create = SchemaBuilder.createType(entity.getName().toCql());

        for (CasserProperty prop : entity.getOrderedProperties()) {

            ColumnType columnType = prop.getColumnType();

            // UDT fields cannot be primary-key columns.
            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                throw new CasserMappingException("primary key columns are not supported in UserDefinedType for " + prop.getPropertyName() + " in entity " + entity);
            }

            try {
                prop.getDataType().addColumn(create, prop.getColumnName());
            }
            catch(IllegalArgumentException e) {
                throw new CasserMappingException("invalid column name '" + prop.getColumnName() + "' in entity '" + entity.getName().getName() + "'", e);
            }
        }

        return create;
    }

    /**
     * Diffs a live UserType against a UDT entity and returns the ALTER
     * statements needed to reconcile them. When {@code dropUnusedColumns} is
     * set, fields present in the UserType but absent from the entity are dropped.
     */
    public static List<SchemaStatement> alterUserType(UserType userType,
            CasserEntity entity, boolean dropUnusedColumns) {

        if (entity.getType() != CasserEntityType.UDT) {
            throw new CasserMappingException("expected UDT entity " + entity);
        }

        List<SchemaStatement> result = new ArrayList<SchemaStatement>();

        /**
         * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will exist
         */
        Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

        final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>()
                : Collections.<String> emptySet();

        for (CasserProperty prop : entity.getOrderedProperties()) {

            String columnName = prop.getColumnName().getName();

            if (dropUnusedColumns) {
                visitedColumns.add(columnName);
            }

            ColumnType columnType = prop.getColumnType();

            // Primary-key columns cannot be altered; skip them.
            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                continue;
            }

            DataType dataType = userType.getFieldType(columnName);

            SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));

            if (stmt != null) {
                result.add(stmt);
            }
        }

        if (dropUnusedColumns) {
            for (String field : userType.getFieldNames()) {
                if (!visitedColumns.contains(field)) {

                    result.add(alter.dropColumn(field));

                }
            }
        }

        return result;
    }

    /** Builds a DROP TYPE statement for a UDT entity. */
    public static SchemaStatement dropUserType(CasserEntity entity) {

        if (entity.getType() != CasserEntityType.UDT) {
            throw new CasserMappingException("expected UDT entity " + entity);
        }

        return SchemaBuilder.dropType(entity.getName().toCql());
    }

    /**
     * Builds a CREATE TABLE IF NOT EXISTS statement for a table entity,
     * including clustering-order options when clustering columns exist.
     */
    public static SchemaStatement createTable(CasserEntity entity) {

        if (entity.getType() != CasserEntityType.TABLE) {
            throw new CasserMappingException("expected table entity " + entity);
        }

        Create create = SchemaBuilder.createTable(entity.getName().toCql());

        create.ifNotExists();

        List<CasserProperty> clusteringColumns = new ArrayList<CasserProperty>();

        for (CasserProperty prop : entity.getOrderedProperties()) {

            ColumnType columnType = prop.getColumnType();

            if (columnType == ColumnType.CLUSTERING_COLUMN) {
                clusteringColumns.add(prop);
            }

            prop.getDataType().addColumn(create, prop.getColumnName());
        }

        if (!clusteringColumns.isEmpty()) {
            Options options = create.withOptions();
            clusteringColumns.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
        }

        return create;
    }

    /**
     * Diffs live table metadata against a table entity and returns the ALTER
     * statements needed to reconcile them. When {@code dropUnusedColumns} is
     * set, columns present in the table but absent from the entity are dropped.
     */
    public static List<SchemaStatement> alterTable(TableMetadata tmd,
            CasserEntity entity, boolean dropUnusedColumns) {

        if (entity.getType() != CasserEntityType.TABLE) {
            throw new CasserMappingException("expected table entity " + entity);
        }

        List<SchemaStatement> result = new ArrayList<SchemaStatement>();

        Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

        final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>()
                : Collections.<String> emptySet();

        for (CasserProperty prop : entity.getOrderedProperties()) {

            String columnName = prop.getColumnName().getName();

            if (dropUnusedColumns) {
                visitedColumns.add(columnName);
            }

            ColumnType columnType = prop.getColumnType();

            // Primary-key columns cannot be altered; skip them.
            if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
                continue;
            }

            ColumnMetadata columnMetadata = tmd.getColumn(columnName);

            SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));

            if (stmt != null) {
                result.add(stmt);
            }
        }

        if (dropUnusedColumns) {
            for (ColumnMetadata cm : tmd.getColumns()) {
                if (!visitedColumns.contains(cm.getName())) {

                    result.add(alter.dropColumn(cm.getName()));

                }
            }
        }

        return result;
    }

    /** Builds a DROP TABLE IF EXISTS statement for a table entity. */
    public static SchemaStatement dropTable(CasserEntity entity) {

        if (entity.getType() != CasserEntityType.TABLE) {
            throw new CasserMappingException("expected table entity " + entity);
        }

        return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
    }

    /** Builds a CREATE INDEX IF NOT EXISTS statement for an indexed property. */
    public static SchemaStatement createIndex(CasserProperty prop) {

        return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
                .ifNotExists()
                .onTable(prop.getEntity().getName().toCql())
                .andColumn(prop.getColumnName().toCql());

    }

    /** Builds CREATE INDEX statements for every indexed property of the entity. */
    public static List<SchemaStatement> createIndexes(CasserEntity entity) {

        return entity.getOrderedProperties().stream()
                .filter(p -> p.getIndexName().isPresent())
                .map(p -> SchemaUtil.createIndex(p))
                .collect(Collectors.toList());

    }

    /**
     * Diffs live index metadata against the entity's indexed properties and
     * returns statements to create missing indexes (and, optionally, drop
     * indexes on columns the entity no longer indexes).
     */
    public static List<SchemaStatement> alterIndexes(TableMetadata tmd,
            CasserEntity entity, boolean dropUnusedIndexes) {

        List<SchemaStatement> list = new ArrayList<SchemaStatement>();

        final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>()
                : Collections.<String> emptySet();

        entity
            .getOrderedProperties()
            .stream()
            .filter(p -> p.getIndexName().isPresent())
            .forEach(p -> {

                String columnName = p.getColumnName().getName();

                if (dropUnusedIndexes) {
                    visitedColumns.add(columnName);
                }

                ColumnMetadata cm = tmd.getColumn(columnName);

                if (cm != null) {
                    IndexMetadata im = cm.getIndex();
                    if (im == null) {
                        list.add(createIndex(p));
                    }
                }
                else {
                    list.add(createIndex(p));
                }

            });

        if (dropUnusedIndexes) {

            tmd
                .getColumns()
                .stream()
                .filter(c -> c.getIndex() != null && !visitedColumns.contains(c.getName()))
                .forEach(c -> {

                    list.add(SchemaBuilder.dropIndex(c.getIndex().getName()).ifExists());

                });

        }

        return list;

    }

    /** Builds a DROP INDEX IF EXISTS statement for an indexed property. */
    public static SchemaStatement dropIndex(CasserProperty prop) {
        return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
    }

    // Translates mapping-level ordering into the driver's direction enum.
    private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
        switch(o) {
        case ASC:
            return SchemaBuilder.Direction.ASC;
        case DESC:
            return SchemaBuilder.Direction.DESC;
        }
        throw new CasserMappingException("unknown ordering " + o);
    }

    /** Fails with a descriptive message for a property whose Java type has no column mapping. */
    public static void throwNoMapping(CasserProperty prop) {

        throw new CasserMappingException(
                "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '" + prop.getPropertyName()
                + "' type is '" + prop.getJavaType() + "' in the entity " + prop.getEntity());

    }

    // Adapts nullable driver ColumnMetadata to the OptionalColumnMetadata view;
    // returns null when the column does not exist.
    private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
        if (columnMetadata != null) {
            return new OptionalColumnMetadata() {

                @Override
                public String getName() {
                    return columnMetadata.getName();
                }

                @Override
                public DataType getType() {
                    return columnMetadata.getType();
                }

            };
        }
        return null;
    }

    // Adapts a (name, nullable type) pair to the OptionalColumnMetadata view;
    // returns null when the field type does not exist.
    private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
        if (dataType != null) {
            return new OptionalColumnMetadata() {

                @Override
                public String getName() {
                    return name;
                }

                @Override
                public DataType getType() {
                    return dataType;
                }

            };
        }
        return null;
    }
}

View file

@ -1,244 +0,0 @@
/*
*
* Copyright (C) 2015 The Casser Authors
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.noorq.casser.config.CasserSettings;
import com.noorq.casser.core.Casser;
import com.noorq.casser.mapping.annotation.Table;
import com.noorq.casser.mapping.annotation.Tuple;
import com.noorq.casser.mapping.annotation.UDT;
import com.noorq.casser.support.CasserMappingException;
public final class CasserMappingEntity implements CasserEntity {
private final Class<?> iface;
private final CasserEntityType type;
private final IdentityName name;
private final ImmutableMap<String, CasserProperty> props;
private final ImmutableList<CasserProperty> orderedProps;
public CasserMappingEntity(Class<?> iface) {
this(iface, autoDetectType(iface));
}
public CasserMappingEntity(Class<?> iface, CasserEntityType type) {
if (iface == null || !iface.isInterface()) {
throw new IllegalArgumentException("invalid parameter " + iface);
}
this.iface = iface;
this.type = Objects.requireNonNull(type, "type is empty");
this.name = resolveName(iface, type);
CasserSettings settings = Casser.settings();
Method[] all = iface.getDeclaredMethods();
List<CasserProperty> propsLocal = new ArrayList<CasserProperty>();
ImmutableMap.Builder<String, CasserProperty> propsBuilder = ImmutableMap.builder();
for (Method m : all) {
if (settings.getGetterMethodDetector().apply(m)) {
CasserProperty prop = new CasserMappingProperty(this, m);
propsBuilder.put(prop.getPropertyName(), prop);
propsLocal.add(prop);
}
}
this.props = propsBuilder.build();
Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE);
this.orderedProps = ImmutableList.copyOf(propsLocal);
validateOrdinals();
}
@Override
public CasserEntityType getType() {
return type;
}
@Override
public Class<?> getMappingInterface() {
return iface;
}
@Override
public Collection<CasserProperty> getOrderedProperties() {
return orderedProps;
}
@Override
public CasserProperty getProperty(String name) {
return props.get(name);
}
@Override
public IdentityName getName() {
return name;
}
private static IdentityName resolveName(Class<?> iface, CasserEntityType type) {
switch(type) {
case TABLE:
return MappingUtil.getTableName(iface, true);
case TUPLE:
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
case UDT:
return MappingUtil.getUserDefinedTypeName(iface, true);
}
throw new CasserMappingException("invalid entity type " + type + " in " + type);
}
private static CasserEntityType autoDetectType(Class<?> iface) {
Objects.requireNonNull(iface, "empty iface");
if (null != iface.getDeclaredAnnotation(Table.class)) {
return CasserEntityType.TABLE;
}
else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return CasserEntityType.TUPLE;
}
else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return CasserEntityType.UDT;
}
throw new CasserMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
}
private void validateOrdinals() {
switch(getType()) {
case TABLE:
validateOrdinalsForTable();
break;
case TUPLE:
validateOrdinalsInTuple();
break;
default:
break;
}
}
/**
 * Ensures that partition-key ordinals and clustering-column ordinals are each
 * unique within their own group for a TABLE entity. Non-key columns are ignored.
 *
 * @throws CasserMappingException when two properties of the same key kind share an ordinal
 */
private void validateOrdinalsForTable() {
    BitSet partitionKeys = new BitSet();
    BitSet clusteringColumns = new BitSet();
    for (CasserProperty prop : getOrderedProperties()) {
        ColumnType type = prop.getColumnType();
        int ordinal = prop.getOrdinal();
        switch (type) {
        case PARTITION_KEY:
            if (partitionKeys.get(ordinal)) {
                throw new CasserMappingException("detected two or more partition key columns with the same ordinal " + ordinal + " in " + prop.getEntity());
            }
            partitionKeys.set(ordinal);
            break;
        case CLUSTERING_COLUMN:
            if (clusteringColumns.get(ordinal)) {
                // Bug fix: message previously read "two or clustering columns".
                throw new CasserMappingException("detected two or more clustering columns with the same ordinal " + ordinal + " in " + prop.getEntity());
            }
            clusteringColumns.set(ordinal);
            break;
        default:
            break;
        }
    }
}
/**
 * Checks that TUPLE entity properties carry ordinals forming the exact range
 * [0, propertyCount) with no duplicates and no gaps.
 */
private void validateOrdinalsInTuple() {
    boolean[] seen = new boolean[props.size()];
    for (CasserProperty p : getOrderedProperties()) {
        int ordinal = p.getOrdinal();
        if (ordinal < 0 || ordinal >= seen.length) {
            throw new CasserMappingException("invalid ordinal " + ordinal + " found for property " + p.getPropertyName() + " in " + p.getEntity());
        }
        if (seen[ordinal]) {
            throw new CasserMappingException("detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
        }
        seen[ordinal] = true;
    }
    // Every slot must be claimed, otherwise the tuple has a hole in its ordinal sequence.
    for (int i = 0; i != seen.length; ++i) {
        if (!seen[i]) {
            throw new CasserMappingException("detected absent ordinal " + i + " in " + this);
        }
    }
}
/** Renders the entity as "Interface(name) kind:" followed by one line per property. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(iface.getSimpleName());
    sb.append("(").append(name.getName()).append(") ");
    sb.append(type.name().toLowerCase());
    sb.append(":\n");
    for (CasserProperty property : getOrderedProperties()) {
        sb.append(property.toString()).append("\n");
    }
    return sb.toString();
}
}

View file

@ -1,115 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import com.noorq.casser.mapping.OrderingDirection;
/**
 * ClusteringColumn maps to the clustering part of the primary key (the
 * "family column" in the legacy Cassandra API).
 *
 * The purpose of this column is to add an additional dimension to the table.
 * Both @PartitionKey and @ClusteringColumn together form the primary key of the table.
 * The primary difference between them is that the partition key is used for routing,
 * to locate a data node in the cluster, while the clustering column is used
 * inside that node to locate the piece of data on the concrete machine.
 *
 * A ClusteringColumn can be thought of as a key in a SortedMap that is fully stored on a single node.
 * Developers must choose fields for clustering columns carefully, because all data
 * inside this SortedMap must fit into one node.
 *
 * A ClusteringColumn can have more than one part, and the order of the parts is important.
 * This order defines the way Cassandra joins the parts and influences data retrieval
 * operations. Each part can have an ordering property that defines the default ascending or descending
 * order of the data. With two or more parts, select queries need to use a consistent
 * order of all parts as they are defined in the table.
 *
 * For example, if the first part has ASC ordering and the second is also ASC, Cassandra will sort entries like this:
 * a-a
 * a-b
 * b-a
 * b-b
 * In this case we are able to run the queries:
 * ORDER BY first ASC, second ASC
 * ORDER BY first DESC, second DESC
 * WHERE first=? ORDER BY second ASC
 * WHERE first=? ORDER BY second DESC
 * WHERE first=? AND second=?
 *
 * But we can not run the queries:
 * ORDER BY first DESC, second ASC
 * ORDER BY first ASC, second DESC
 * WHERE second=? ORDER BY first (ASC,DESC)
 *
 * @author Alex Shvid
 *
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface ClusteringColumn {
/**
 * Default value is the name of the method normalized to underscore.
 *
 * @return name of the column
 */
String value() default "";
/**
 * ClusteringColumn parts must be ordered in the @Table; this is a requirement of Cassandra.
 * Cassandra joins all parts into the final clustering key that is stored in the column family name.
 * Additionally, all parts can have an ordering (ASC, DESC) that, together with the sequence of parts,
 * determines the key comparison function, so Cassandra always stores column family names in sorted order.
 *
 * By default the ordinal is 0, because in most cases a @Table has a single ClusteringColumn.
 * If you have two or more parts of the ClusteringColumn, then you need to use ordinal() to
 * define the sequence of the parts.
 *
 * @return number that is used to sort clustering columns
 */
int ordinal() default 0;
/**
 * Default order of values in the ClusteringColumn.
 * This ordering is used for comparison of the clustering column values when Cassandra stores them in
 * sorted order.
 *
 * Default value is the ascending order.
 *
 * @return ascending order or descending order of clustering column values
 */
OrderingDirection ordering() default OrderingDirection.ASC;
/**
 * For reserved words in Cassandra we need quotation in CQL queries. This property marks that
 * the name of the column needs to be quoted.
 * (NOTE(review): the original doc said "UDT type" here, which looks like a copy-paste
 * from the UDT annotation — this annotation applies to a clustering column.)
 *
 * Default value is false; we quote only selected names.
 *
 * @return true if the name has to be quoted
 */
boolean forceQuote() default false;
}

View file

@ -1,277 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import com.noorq.casser.mapping.validator.AlphabetValidator;
import com.noorq.casser.mapping.validator.EmailValidator;
import com.noorq.casser.mapping.validator.LengthValidator;
import com.noorq.casser.mapping.validator.LowerCaseValidator;
import com.noorq.casser.mapping.validator.MaxLengthValidator;
import com.noorq.casser.mapping.validator.MinLengthValidator;
import com.noorq.casser.mapping.validator.NotEmptyValidator;
import com.noorq.casser.mapping.validator.NotNullValidator;
import com.noorq.casser.mapping.validator.NumberValidator;
import com.noorq.casser.mapping.validator.PatternValidator;
import com.noorq.casser.mapping.validator.UpperCaseValidator;
/**
* Constraint annotations are used for data integrity, mostly for @java.lang.String types.
* The annotation is placed on the particular getter method in the model interface.
*
* None of them has any effect on selects and data retrieval operations.
*
* Support types:
* - @NotNull supports any @java.lang.Object type
* - All annotations support @java.lang.String type
*
* @author Alex Shvid
*
*/
public final class Constraints {
private Constraints() {
}
/**
* NotNull annotation is using to check that value is not null before storing it
*
* Applicable to use in any @java.lang.Object
*
* It does not check on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = NotNullValidator.class)
public @interface NotNull {
}
/**
* NotEmpty annotation is using to check that value has text before storing it
*
* Also checks for the null and it is more strict annotation then @NotNull
*
* Can be used for @java.lang.CharSequence, @ByteBuffer and any array
*
* It does not check on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = NotEmptyValidator.class)
public @interface NotEmpty {
}
/**
* Email annotation is using to check that value has a valid email before storing it
*
* Can be used only for @CharSequence
*
* It does not check on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = EmailValidator.class)
public @interface Email {
}
/**
* Number annotation is using to check that all letters in value are digits before storing it
*
* Can be used only for @java.lang.CharSequence
*
* It does not check on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = NumberValidator.class)
public @interface Number {
}
/**
* Alphabet annotation is using to check that all letters in value are in specific alphabet before storing it
*
* Can be used only for @java.lang.CharSequence
*
* It does not check on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = AlphabetValidator.class)
public @interface Alphabet {
/**
* Defines alphabet that will be used to check value
*
* @return alphabet characters in the string
*/
String value();
}
/**
* Length annotation is using to ensure that value has exact length before storing it
*
* Can be used for @java.lang.CharSequence, @ByteBuffer and any array
*
* It does not have effect on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = LengthValidator.class)
public @interface Length {
int value();
}
/**
* MaxLength annotation is using to ensure that value has length less or equal to some threshold before storing it
*
* Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
*
* It does not have effect on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = MaxLengthValidator.class)
public @interface MaxLength {
int value();
}
/**
* MinLength annotation is using to ensure that value has length greater or equal to some threshold before storing it
*
* Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
*
* It does not have effect on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = MinLengthValidator.class)
public @interface MinLength {
int value();
}
/**
* LowerCase annotation is using to ensure that value is in lower case before storing it
*
* Can be used only for @java.lang.CharSequence
*
* It does not have effect on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = LowerCaseValidator.class)
public @interface LowerCase {
}
/**
* UpperCase annotation is using to ensure that value is in upper case before storing it
*
* Can be used only for @java.lang.CharSequence
*
* It does not have effect on selects and data retrieval operations
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = UpperCaseValidator.class)
public @interface UpperCase {
}
/**
 * Pattern annotation is used to ensure that the value matches the given
 * regular expression before storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Constraint(validatedBy = PatternValidator.class)
public @interface Pattern {
/**
 * User defined regex expression to check match of the value
 *
 * @return Java regex pattern
 */
String value();
/**
 * Regex flags composition (see java.util.regex.Pattern flag constants)
 *
 * @return Java regex flags
 */
int flags();
}
}

View file

@ -1,532 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import com.datastax.driver.core.DataType;
/**
* Types annotations are using for clarification of Cassandra data type for particular Java type.
*
* Sometimes it is possible to have for single Java type multiple Cassandra data types:
* - @String can be @DataType.Name.ASCII or @DataType.Name.TEXT or @DataType.Name.VARCHAR
* - @Long can be @DataType.Name.BIGINT or @DataType.Name.COUNTER
*
* All those type annotations simplify mapping between Java types and Cassandra data types.
* They are not required, for each Java type there is a default Cassandra data type in Casser, but in some
* cases you would like to control mapping to make sure that the right Cassandra data type is using.
*
* For complex types like collections, UDF and Tuple types all those annotations are using to
* clarify the sub-type(s) or class/UDF names.
*
* Has significant effect on schema operations.
*
* @author Alex Shvid
*
*/
public final class Types {
private Types() {
}
/**
* Says to use @DataType.Name.ASCII data type in schema
* Java type is @String
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Ascii {
}
/**
* Says to use @DataType.Name.BIGINT data type in schema
* Java type is @Long
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Bigint {
}
/**
* Says to use @DataType.Name.BLOB data type in schema
* Java type is @ByteBuffer or @byte[]
* Using by default
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Blob {
}
/**
* Says to use @DataType.Name.LIST data type in schema with specific sub-type
* Java type is @List
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: prepend, prependAll, setIdx, append, appendAll, discard and discardAll in @UpdateOperation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface List {
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT sub-type in the list, consider @UDTList annotation
*
* @return data type name of the value
*/
DataType.Name value();
}
/**
* Says to use @DataType.Name.MAP data type in schema with specific sub-types
* Java type is @Map
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: put and putAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Map {
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT key sub-type in the map, consider @UDTKeyMap or @UDTMap annotations
*
* @return data type name of the key
*/
DataType.Name key();
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT value sub-type in the map, consider @UDTValueMap or @UDTMap annotations
*
* @return data type name of the value
*/
DataType.Name value();
}
/**
* Says to use @DataType.Name.COUNTER type in schema
* Java type is @Long
*
* For this type there are special operations: increment and decrement in @UpdateOperation.
* You do not need to initialize counter value, it will be done automatically by Cassandra.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Counter {
}
/**
* Says to use @DataType.Name.SET data type in schema with specific sub-type
* Java type is @Set
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: add, addAll, remove and removeAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Set {
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT sub-type in the set, consider @UDTSet annotation
*
* @return data type name of the value
*/
DataType.Name value();
}
/**
* Says to use @DataType.Name.CUSTOM type in schema
* Java type is @ByteBuffer or @byte[]
*
* Uses for custom user types that has special implementation.
* Casser does not deal with this class directly for now, uses only in serialized form.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Custom {
/**
* Class name of the custom user type that is implementation of the type
*
* @return class name of the custom type implementation
*/
String className();
}
/**
* Says to use @DataType.Name.TEXT type in schema
* Java type is @String
* Using by default
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Text {
}
/**
* Says to use @DataType.Name.TIMESTAMP type in schema
* Java type is @Date
* Using by default
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Timestamp {
}
/**
* Says to use @DataType.Name.TIMEUUID type in schema
* Java type is @UUID or @Date
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Timeuuid {
}
/**
 * Says to use @DataType.Name.TUPLE type in schema
 * Java type is @TupleValue or model interface with @Tuple annotation
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
// Consistency fix: every sibling annotation in Types restricts its targets;
// without @Target this annotation was applicable to any program element.
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Tuple {
/**
 * If Java type is the @TupleValue then this field is required.
 * Any Cassandra Tuple is the sequence of Cassandra types.
 * For now Casser supports only simple data types in tuples for @TupleValue Java type
 *
 * In case if Java type is the model interface with @Tuple annotation then
 * all methods in this interface can have Types annotations that can be complex types as well.
 *
 * @return data type name sequence
 */
DataType.Name[] value() default {};
}
/**
* Says to use @DataType.Name.UDT type in schema
* Java type is @UDTValue or model interface with @UDT annotation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDT {
/**
* If Java type is the @UDTValue then this field is required.
* Any Cassandra UDT has name and must be created before this use as a Cassandra Type.
*
* This value is the UDT name of the Cassandra Type that was already created in the schema
*
* In case of Java type is the model interface with @UDT annotation then
* this field is not using since model interface defines UserDefinedType with specific name
*
* @return UDT name
*/
String value() default "";
/**
* Only used for JavaType @UDTValue
*
* In case if value() method returns reserved word that can not be used as a name of UDT then
* forceQuote will add additional quotes around this name in all CQL queries.
*
* Default value is false.
*
* @return true if quotation is needed
*/
boolean forceQuote() default false;
}
/**
* Says to use @DataType.Name.MAP data type in schema with specific UDT sub-type as a key and simple sub-type as a value
* Java type is @Map
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: put and putAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDTKeyMap {
/**
* Clarification of using the UDT data type as a key sub-type in the collection.
*
* @return annotation of UDT type
*/
UDT key();
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT value sub-type in the map, consider @UDTMap annotations
*
* @return data type name of the value
*/
DataType.Name value();
}
/**
* Says to use @DataType.Name.LIST data type in schema with specific UDT sub-type
* Java type is @List
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: prepend, prependAll, setIdx, append, appendAll, discard and discardAll in @UpdateOperation
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDTList {
/**
* Clarification of using the UDT data type as a sub-type in the collection.
*
* @return annotation of the UDT value
*/
UDT value();
}
/**
* Says to use @DataType.Name.MAP data type in schema with specific UDT sub-types
* Java type is @Map
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: put and putAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDTMap {
/**
* Clarification of using the UDT data type as a key sub-type in the collection.
*
* @return annotation of the UDT key
*/
UDT key();
/**
* Clarification of using the UDT data type as a value sub-type in the collection.
*
* @return annotation of the UDT value
*/
UDT value();
}
/**
* Says to use @DataType.Name.SET data type in schema with specific UDT sub-type
* Java type is @Set
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: add, addAll, remove and removeAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDTSet {
/**
* Clarification of using the UDT data type as a sub-type in the collection.
*
* @return annotation of the UDT value
*/
UDT value();
}
/**
* Says to use @DataType.Name.MAP data type in schema with specific simple sub-type as a key and UDT sub-type as a value
* Java type is @Map
*
* Casser does not allow to use a specific implementation of the collection thereof data retrieval operation
* result can be a collection with another implementation.
*
* This annotation is usually used only for sub-types clarification and only in case if sub-type is Java type that
* corresponds to multiple Cassandra data types.
*
* For this type there are special operations: put and putAll in @UpdateOperation.
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface UDTValueMap {
/**
* Clarification of using the sub-type data type in the collection.
* It supports only simple data type (not Collection, UDT or Tuple)
*
* In case if you need UDT key sub-type in the map, consider @UDTMap annotations
*
* @return data type name of the key
*/
DataType.Name key();
/**
* Clarification of using the UDT data type as a value sub-type in the collection.
*
* @return annotation of the UDT value
*/
UDT value();
}
/**
* Says to use @DataType.Name.UUID type in schema
* Java type is @UUID
* Using by default
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Uuid {
}
/**
* Says to use @DataType.Name.VARCHAR type in schema
* Java type is @String
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface Varchar {
}
}

View file

@ -1,92 +0,0 @@
/*
* Copyright (C) 2015 The Casser Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.support;
import java.util.function.Function;

import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import scala.concurrent.Future;
import scala.concurrent.impl.Promise.DefaultPromise;
/**
 * Utility methods that bridge Guava's {@code ListenableFuture} into Scala's
 * {@code Future}, optionally pairing the result with extra values as
 * {@code Fun.TupleN} instances.
 *
 * Each adapter completes the returned Future with the mapped success value of
 * the underlying ListenableFuture, or fails it with the same cause.
 */
public final class Scala {

    /**
     * Shared adapter: completes a fresh Scala promise from the given Guava
     * future, transforming the successful result with the supplied mapper.
     * Failures are propagated unchanged. This removes the callback boilerplate
     * previously duplicated across all five public overloads.
     */
    private static <T, R> Future<R> adapt(ListenableFuture<T> future, Function<T, R> mapper) {
        final scala.concurrent.Promise<R> promise = new DefaultPromise<R>();
        Futures.addCallback(future, new FutureCallback<T>() {
            @Override public void onSuccess(T result) {
                promise.success(mapper.apply(result));
            }
            @Override public void onFailure(Throwable t) {
                promise.failure(t);
            }
        });
        return promise.future();
    }

    public static <T> Future<T> asFuture(ListenableFuture<T> future) {
        return adapt(future, result -> result);
    }

    public static <T, A> Future<Fun.Tuple2<T, A>> asFuture(ListenableFuture<T> future, A a) {
        return adapt(future, result -> new Fun.Tuple2<T, A>(result, a));
    }

    public static <T, A, B> Future<Fun.Tuple3<T, A, B>> asFuture(ListenableFuture<T> future, A a, B b) {
        return adapt(future, result -> new Fun.Tuple3<T, A, B>(result, a, b));
    }

    public static <T, A, B, C> Future<Fun.Tuple4<T, A, B, C>> asFuture(ListenableFuture<T> future, A a, B b, C c) {
        return adapt(future, result -> new Fun.Tuple4<T, A, B, C>(result, a, b, c));
    }

    public static <T, A, B, C, D> Future<Fun.Tuple5<T, A, B, C, D>> asFuture(ListenableFuture<T> future, A a, B b, C c, D d) {
        return adapt(future, result -> new Fun.Tuple5<T, A, B, C, D>(result, a, b, c, d));
    }
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,18 +13,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.config;
package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import com.noorq.casser.core.DslInstantiator;
import com.noorq.casser.core.MapperInstantiator;
import com.noorq.casser.core.reflect.ReflectionDslInstantiator;
import com.noorq.casser.core.reflect.ReflectionMapperInstantiator;
import com.noorq.casser.mapping.convert.CamelCaseToUnderscoreConverter;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
import net.helenus.core.reflect.ReflectionDslInstantiator;
import net.helenus.core.reflect.ReflectionMapperInstantiator;
import net.helenus.mapping.convert.CamelCaseToUnderscoreConverter;
public class DefaultCasserSettings implements CasserSettings {
public class DefaultHelenusSettings implements HelenusSettings {
@Override
public Function<String, String> getPropertyToColumnConverter() {

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.config;
package net.helenus.config;
import net.helenus.mapping.annotation.Transient;
import java.lang.reflect.Method;
import java.util.function.Function;
@ -21,20 +23,25 @@ import java.util.function.Function;
/**
 * Decides whether a {@link Method} should be treated as a mapped getter:
 * it must take no arguments, return a value, and not be annotated
 * {@code @Transient}.
 */
public enum GetterMethodDetector implements Function<Method, Boolean> {

	INSTANCE;

	@Override
	public Boolean apply(Method method) {
		if (method == null) {
			throw new IllegalArgumentException("empty parameter");
		}
		// A getter takes no parameters and returns something.
		boolean looksLikeGetter = method.getParameterCount() == 0 && method.getReturnType() != void.class;
		// Methods marked "Transient" are not mapped, skip them.
		boolean isTransient = method.getDeclaredAnnotation(Transient.class) != null;
		return looksLikeGetter && !isTransient;
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,22 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.config;
package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
import com.noorq.casser.core.DslInstantiator;
import com.noorq.casser.core.MapperInstantiator;
import net.helenus.core.DslInstantiator;
import net.helenus.core.MapperInstantiator;
public interface CasserSettings {
public interface HelenusSettings {
Function<String, String> getPropertyToColumnConverter();
Function<Method, Boolean> getGetterMethodDetector();
DslInstantiator getDslInstantiator();
MapperInstantiator getMapperInstantiator();
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,141 +13,136 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.io.PrintStream;
import java.util.concurrent.Executor;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
import com.noorq.casser.mapping.value.ColumnValuePreparer;
import com.noorq.casser.mapping.value.ColumnValueProvider;
import com.noorq.casser.support.CasserException;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
import javax.xml.validation.Schema;
public abstract class AbstractSessionOperations {
final Logger logger = LoggerFactory.getLogger(getClass());
abstract public Session currentSession();
abstract public String usingKeyspace();
abstract public boolean isShowCql();
abstract public PrintStream getPrintStream();
abstract public Executor getExecutor();
abstract public SessionRepository getSessionRepository();
abstract public ColumnValueProvider getValueProvider();
abstract public ColumnValuePreparer getValuePreparer();
public PreparedStatement prepare(RegularStatement statement) {
try {
log(statement, false);
return currentSession().prepare(statement);
}
catch(RuntimeException e) {
} catch (RuntimeException e) {
throw translateException(e);
}
}
public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
try {
log(statement, false);
return currentSession().prepareAsync(statement);
}
catch(RuntimeException e) {
} catch (RuntimeException e) {
throw translateException(e);
}
}
public ResultSet execute(Statement statement, boolean showValues) {
return executeAsync(statement, showValues).getUninterruptibly();
}
public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
try {
log(statement, showValues);
return currentSession().executeAsync(statement);
}
catch(RuntimeException e) {
} catch (RuntimeException e) {
throw translateException(e);
}
}
void log(Statement statement, boolean showValues) {
if (logger.isInfoEnabled()) {
logger.info("Execute statement " + statement);
}
if (isShowCql()) {
if (statement instanceof BuiltStatement) {
BuiltStatement builtStatement = (BuiltStatement) statement;
if (showValues) {
RegularStatement regularStatement = builtStatement.setForceNoValues(true);
printCql(regularStatement.getQueryString());
}
else {
printCql(builtStatement.getQueryString());
}
}
else if (statement instanceof RegularStatement) {
if (isShowCql()) {
if (statement instanceof BuiltStatement) {
BuiltStatement builtStatement = (BuiltStatement) statement;
if (showValues) {
RegularStatement regularStatement = builtStatement.setForceNoValues(true);
printCql(regularStatement.getQueryString());
} else {
printCql(builtStatement.getQueryString());
}
} else if (statement instanceof RegularStatement) {
RegularStatement regularStatement = (RegularStatement) statement;
printCql(regularStatement.getQueryString());
}
else {
} else {
printCql(statement.toString());
}
}
}
public void cache(String key, Object value) {
}
RuntimeException translateException(RuntimeException e) {
if (e instanceof CasserException) {
if (e instanceof HelenusException) {
return e;
}
throw new CasserException(e);
throw new HelenusException(e);
}
void printCql(String cql) {
getPrintStream().println(cql);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,11 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
public enum AutoDdl {
VALIDATE,
UPDATE,
CREATE,
CREATE_DROP;
}
VALIDATE, UPDATE, CREATE, CREATE_DROP;
}

View file

@ -0,0 +1,11 @@
package net.helenus.core;
/**
 * Checked exception carrying the {@link UnitOfWork} that could not complete —
 * presumably raised when committing a unit of work conflicts with another
 * (nested or concurrent) one; TODO confirm intended semantics with callers.
 */
public class ConflictingUnitOfWorkException extends Exception {

	// Exception is Serializable; declare an explicit version id.
	private static final long serialVersionUID = 1L;

	// The unit of work that failed to complete.
	final UnitOfWork uow;

	ConflictingUnitOfWorkException(UnitOfWork uow) {
		this.uow = uow;
	}

	/** Returns the unit of work involved in the conflict. */
	public UnitOfWork getUnitOfWork() {
		return uow;
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,14 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Optional;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.datastax.driver.core.Metadata;
import net.helenus.core.reflect.HelenusPropertyNode;
public interface DslInstantiator {
<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent);
<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,33 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.Clause;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.value.ColumnValuePreparer;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.ColumnValuePreparer;
public final class Filter<V> {
private final CasserPropertyNode node;
private final HelenusPropertyNode node;
private final Postulate<V> postulate;
private Filter(CasserPropertyNode node, Postulate<V> postulate) {
private Filter(HelenusPropertyNode node, Postulate<V> postulate) {
this.node = node;
this.postulate = postulate;
}
public CasserPropertyNode getNode() {
public HelenusPropertyNode getNode() {
return node;
}
public Clause getClause(ColumnValuePreparer valuePreparer) {
return postulate.getClause(node, valuePreparer);
}
public static <V> Filter<V> equal(Getter<V> getter, V val) {
return create(getter, Operator.EQ, val);
}
@ -47,26 +48,26 @@ public final class Filter<V> {
public static <V> Filter<V> in(Getter<V> getter, V... vals) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(vals, "empty values");
if (vals.length == 0) {
throw new IllegalArgumentException("values array is empty");
}
for (int i = 0; i != vals.length; ++i) {
Objects.requireNonNull(vals[i], "value[" + i + "] is empty");
}
CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
Postulate<V> postulate = Postulate.of(Operator.IN, vals);
return new Filter<V>(node, postulate);
}
public static <V> Filter<V> greaterThan(Getter<V> getter, V val) {
return create(getter, Operator.GT, val);
}
public static <V> Filter<V> lessThan(Getter<V> getter, V val) {
return create(getter, Operator.LT, val);
}
@ -83,24 +84,24 @@ public final class Filter<V> {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(postulate, "empty operator");
CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
return new Filter<V>(node, postulate);
}
public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(op, "empty op");
Objects.requireNonNull(val, "empty value");
if (op == Operator.IN) {
throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
}
CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
Postulate<V> postulate = Postulate.of(op, val);
return new Filter<V>(node, postulate);
}
@ -108,7 +109,5 @@ public final class Filter<V> {
public String toString() {
return node.getColumnName() + postulate.toString();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,10 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
/**
 * A supplier of a single value of type {@code V}, used to reference a mapped
 * property in a type-safe way (typically implemented by a method reference or
 * lambda on a DSL proxy).
 *
 * @param <V> the type of the value returned by {@link #get()}
 */
@FunctionalInterface
public interface Getter<V> {

	/** Returns the referenced value. */
	V get();
}

View file

@ -0,0 +1,187 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;
import net.helenus.config.DefaultHelenusSettings;
import net.helenus.config.HelenusSettings;
import net.helenus.core.reflect.DslExportable;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusMappingException;
/**
 * Static entry point to the Helenus mapping layer: holds global settings,
 * tracks open sessions, and builds/caches DSL proxies and entity metadata
 * for mapping interfaces.
 */
public final class Helenus {

	// Active settings; swappable via settings(HelenusSettings).
	private static volatile HelenusSettings settings = new DefaultHelenusSettings();
	// Cache of root DSL proxy instances keyed by mapping interface.
	private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
	// Cluster Metadata remembered per mapping interface (see resolve/entity).
	private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
	// Registration (setSession) and shutdown may run on different threads,
	// so back the set with a ConcurrentHashMap rather than a plain HashSet.
	private static final Set<HelenusSession> sessions = Collections
			.newSetFromMap(new ConcurrentHashMap<HelenusSession, Boolean>());
	private static volatile HelenusSession singleton;

	private Helenus() {
	}

	protected static void setSession(HelenusSession session) {
		sessions.add(session);
		singleton = session;
	}

	/** Returns the most recently registered session, or null if none. */
	public static HelenusSession session() {
		return singleton;
	}

	/** Closes every registered session and clears all caches. */
	public static void shutdown() {
		// Close everything first, then clear the set. The original removed
		// elements inside forEach over the same set, which risks
		// ConcurrentModificationException on a non-concurrent set.
		sessions.forEach(HelenusSession::close);
		sessions.clear();
		// Do not hand out a closed session afterwards.
		singleton = null;
		dslCache.clear();
	}

	public static HelenusSettings settings() {
		return settings;
	}

	/** Replaces the global settings, returning the previous value. */
	public static HelenusSettings settings(HelenusSettings overrideSettings) {
		HelenusSettings old = settings;
		settings = overrideSettings;
		return old;
	}

	public static SessionInitializer connect(Cluster cluster) {
		Session session = cluster.connect();
		return new SessionInitializer(session);
	}

	public static SessionInitializer connect(Cluster cluster, String keyspace) {
		Session session = cluster.connect(keyspace);
		return new SessionInitializer(session);
	}

	public static SessionInitializer init(Session session) {
		if (session == null) {
			throw new IllegalArgumentException("empty session");
		}
		return new SessionInitializer(session);
	}

	public static void clearDslCache() {
		dslCache.clear();
	}

	public static <E> E dsl(Class<E> iface) {
		return dsl(iface, null);
	}

	public static <E> E dsl(Class<E> iface, Metadata metadata) {
		return dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
	}

	public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Metadata metadata) {
		return dsl(iface, classLoader, Optional.empty(), metadata);
	}

	/**
	 * Returns (building and caching if necessary) the DSL proxy for the given
	 * mapping interface. Only root proxies (no parent node) are cached.
	 */
	@SuppressWarnings("unchecked") // instantiator contract: proxy implements iface
	public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
			Metadata metadata) {
		Object instance = null;
		if (!parent.isPresent()) {
			instance = dslCache.get(iface);
		}
		if (instance == null) {
			instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent, metadata);
			if (!parent.isPresent()) {
				// Another thread may have raced us; prefer the cached copy so
				// all callers share one proxy per interface.
				Object existing = dslCache.putIfAbsent(iface, instance);
				if (existing != null) {
					instance = existing;
				}
			}
		}
		return (E) instance;
	}

	public static <E> E map(Class<E> iface, Map<String, Object> src) {
		return map(iface, src, iface.getClassLoader());
	}

	public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
		return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
	}

	public static HelenusEntity entity(Class<?> iface) {
		return entity(iface, metadataForEntity.get(iface));
	}

	public static HelenusEntity entity(Class<?> iface, Metadata metadata) {
		Object dsl = dsl(iface, metadata);
		DslExportable e = (DslExportable) dsl;
		return e.getHelenusMappingEntity();
	}

	public static HelenusEntity resolve(Object ifaceOrDsl) {
		return resolve(ifaceOrDsl, metadataForEntity.get(ifaceOrDsl));
	}

	/**
	 * Resolves a mapping interface or DSL proxy to its {@link HelenusEntity}.
	 *
	 * @throws HelenusMappingException if the argument is null, a non-interface
	 *             class, or not a recognized DSL object
	 */
	public static HelenusEntity resolve(Object ifaceOrDsl, Metadata metadata) {
		if (ifaceOrDsl == null) {
			throw new HelenusMappingException("ifaceOrDsl is null");
		}
		if (ifaceOrDsl instanceof DslExportable) {
			DslExportable e = (DslExportable) ifaceOrDsl;
			return e.getHelenusMappingEntity();
		}
		if (ifaceOrDsl instanceof Class) {
			Class<?> iface = (Class<?>) ifaceOrDsl;
			if (!iface.isInterface()) {
				throw new HelenusMappingException("class is not an interface " + iface);
			}
			if (metadata != null) {
				// ConcurrentHashMap rejects null values; the one-arg resolve()
				// may legitimately pass null when no metadata was recorded.
				metadataForEntity.putIfAbsent(iface, metadata);
			}
			return entity(iface, metadata);
		}
		throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,84 +13,87 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.io.Closeable;
import java.io.PrintStream;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import com.datastax.driver.core.CloseFuture;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.noorq.casser.core.operation.CountOperation;
import com.noorq.casser.core.operation.DeleteOperation;
import com.noorq.casser.core.operation.InsertOperation;
import com.noorq.casser.core.operation.SelectOperation;
import com.noorq.casser.core.operation.UpdateOperation;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.value.ColumnValuePreparer;
import com.noorq.casser.mapping.value.ColumnValueProvider;
import com.noorq.casser.mapping.value.RowColumnValueProvider;
import com.noorq.casser.mapping.value.StatementColumnValuePreparer;
import com.noorq.casser.mapping.value.ValueProviderMap;
import com.noorq.casser.support.Fun;
import com.noorq.casser.support.Fun.Tuple1;
import com.noorq.casser.support.Fun.Tuple2;
import com.noorq.casser.support.Fun.Tuple6;
import com.datastax.driver.core.*;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
public final class CasserSession extends AbstractSessionOperations implements Closeable {
import net.helenus.core.operation.*;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.*;
import net.helenus.support.Fun;
import net.helenus.support.Fun.Tuple1;
import net.helenus.support.Fun.Tuple2;
import net.helenus.support.Fun.Tuple6;
public final class HelenusSession extends AbstractSessionOperations implements Closeable {
private final int MAX_CACHE_SIZE = 10000;
private final int MAX_CACHE_EXPIRE_SECONDS = 600;
private final Session session;
private final CodecRegistry registry;
private volatile String usingKeyspace;
private volatile boolean showCql;
private final PrintStream printStream;
private final SessionRepository sessionRepository;
private final Executor executor;
private final boolean dropSchemaOnClose;
private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer;
CasserSession(Session session,
String usingKeyspace,
boolean showCql,
PrintStream printStream,
SessionRepositoryBuilder sessionRepositoryBuilder,
Executor executor,
boolean dropSchemaOnClose) {
private final Metadata metadata;
private final Cache<String, Object> sessionCache;
private UnitOfWork currentUnitOfWork;
HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
boolean dropSchemaOnClose) {
this.session = session;
this.usingKeyspace = Objects.requireNonNull(usingKeyspace, "keyspace needs to be selected before creating session");
this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
"keyspace needs to be selected before creating session");
this.showCql = showCql;
this.printStream = printStream;
this.sessionRepository = sessionRepositoryBuilder.build();
this.executor = executor;
this.dropSchemaOnClose = dropSchemaOnClose;
this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
this.metadata = session.getCluster().getMetadata();
this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
.expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
this.currentUnitOfWork = null;
}
@Override
public Session currentSession() {
return session;
}
@Override
public String usingKeyspace() {
return usingKeyspace;
}
public CasserSession useKeyspace(String keyspace) {
public HelenusSession useKeyspace(String keyspace) {
session.execute(SchemaUtil.use(keyspace, false));
this.usingKeyspace = keyspace;
return this;
}
@Override
public boolean isShowCql() {
return showCql;
@ -101,16 +104,16 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
return printStream;
}
public CasserSession showCql() {
public HelenusSession showCql() {
this.showCql = true;
return this;
}
public CasserSession showCql(boolean showCql) {
public HelenusSession showCql(boolean showCql) {
this.showCql = showCql;
return this;
}
@Override
public Executor getExecutor() {
return executor;
@ -125,125 +128,155 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
public ColumnValueProvider getValueProvider() {
return valueProvider;
}
@Override
public ColumnValuePreparer getValuePreparer() {
return valuePreparer;
}
public Metadata getMetadata() { return metadata; }
public synchronized UnitOfWork begin() {
if (currentUnitOfWork == null) {
currentUnitOfWork = new UnitOfWork(this);
return currentUnitOfWork;
} else {
return currentUnitOfWork.begin();
}
}
public synchronized void commit() throws ConflictingUnitOfWorkException {
if (currentUnitOfWork != null) {
currentUnitOfWork.commit();
currentUnitOfWork = null;
}
}
public synchronized void abort() {
if (currentUnitOfWork != null) {
currentUnitOfWork.abort();
currentUnitOfWork = null;
}
}
public void cache(String key, Object value) {
sessionCache.put(key, value); // ttl
}
public <E> SelectOperation<E> select(Class<E> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is empty");
Objects.requireNonNull(entityClass, "entityClass is empty");
ColumnValueProvider valueProvider = getValueProvider();
CasserEntity entity = Casser.entity(entityClass);
HelenusEntity entity = Helenus.entity(entityClass);
return new SelectOperation<E>(this, entity, (r) -> {
Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
return (E) Casser.map(entityClass, map);
return (E) Helenus.map(entityClass, map);
});
}
public SelectOperation<Fun.ArrayTuple> select() {
return new SelectOperation<Fun.ArrayTuple>(this);
}
public SelectOperation<Row> selectAll(Class<?> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is empty");
return new SelectOperation<Row>(this, Casser.entity(entityClass));
return new SelectOperation<Row>(this, Helenus.entity(entityClass));
}
public <E> SelectOperation<E> selectAll(Class<E> entityClass, Function<Row, E> rowMapper) {
Objects.requireNonNull(entityClass, "entityClass is empty");
Objects.requireNonNull(rowMapper, "rowMapper is empty");
return new SelectOperation<E>(this, Casser.entity(entityClass), rowMapper);
return new SelectOperation<E>(this, Helenus.entity(entityClass), rowMapper);
}
public <V1> SelectOperation<Fun.Tuple1<V1>> select(Getter<V1> getter1) {
Objects.requireNonNull(getter1, "field 1 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
}
public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
p1, p2);
}
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
Getter<V3> getter3) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
}
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
Getter<V3> getter3, Getter<V4> getter4) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this, new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
}
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
Objects.requireNonNull(getter5, "field 5 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
CasserPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
p1, p2, p3, p4, p5);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
}
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3,
Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1,
Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
Objects.requireNonNull(getter5, "field 5 is empty");
Objects.requireNonNull(getter6, "field 6 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
CasserPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
CasserPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
p1, p2, p3, p4, p5, p6);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4,
p5, p6);
}
public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3,
Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6,
Getter<V7> getter7) {
Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5,
Getter<V6> getter6, Getter<V7> getter7) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
@ -251,95 +284,93 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
Objects.requireNonNull(getter5, "field 5 is empty");
Objects.requireNonNull(getter6, "field 6 is empty");
Objects.requireNonNull(getter7, "field 7 is empty");
CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
CasserPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
CasserPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
CasserPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
getValueProvider(),
p1, p2, p3, p4, p5, p6, p7),
p1, p2, p3, p4, p5, p6, p7);
HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);
return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2,
p3, p4, p5, p6, p7);
}
public CountOperation count() {
return new CountOperation(this);
}
public CountOperation count(Object dsl) {
Objects.requireNonNull(dsl, "dsl is empty");
return new CountOperation(this, Casser.resolve(dsl));
return new CountOperation(this, Helenus.resolve(dsl));
}
public <V> UpdateOperation update() {
return new UpdateOperation(this);
}
public <V> UpdateOperation update(Getter<V> getter, V v) {
Objects.requireNonNull(getter, "field is empty");
Objects.requireNonNull(v, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
return new UpdateOperation(this, p, v);
}
public InsertOperation insert() {
return new InsertOperation(this, true);
}
public InsertOperation insert(Object pojo) {
Objects.requireNonNull(pojo, "pojo is empty");
Class<?> iface = MappingUtil.getMappingInterface(pojo);
CasserEntity entity = Casser.entity(iface);
HelenusEntity entity = Helenus.entity(iface);
return new InsertOperation(this, entity, pojo, true);
}
public InsertOperation upsert() {
return new InsertOperation(this, false);
}
public InsertOperation upsert(Object pojo) {
Objects.requireNonNull(pojo, "pojo is empty");
Class<?> iface = MappingUtil.getMappingInterface(pojo);
CasserEntity entity = Casser.entity(iface);
HelenusEntity entity = Helenus.entity(iface);
return new InsertOperation(this, entity, pojo, false);
}
public DeleteOperation delete() {
return new DeleteOperation(this);
}
public DeleteOperation delete(Object dsl) {
Objects.requireNonNull(dsl, "dsl is empty");
return new DeleteOperation(this, Casser.resolve(dsl));
return new DeleteOperation(this, Helenus.resolve(dsl));
}
public Session getSession() {
return session;
}
public void close() {
if (session.isClosed()) {
return;
}
if (dropSchemaOnClose) {
dropSchema();
}
session.close();
}
public CloseFuture closeAsync() {
if (!session.isClosed() && dropSchemaOnClose) {
@ -348,31 +379,27 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
return session.closeAsync();
}
private void dropSchema() {
sessionRepository.entities().forEach(e -> dropEntity(e));
}
private void dropEntity(CasserEntity entity) {
switch(entity.getType()) {
case TABLE:
execute(SchemaUtil.dropTable(entity), true);
break;
case UDT:
execute(SchemaUtil.dropUserType(entity), true);
break;
private void dropEntity(HelenusEntity entity) {
switch (entity.getType()) {
case TABLE :
execute(SchemaUtil.dropTable(entity), true);
break;
case UDT :
execute(SchemaUtil.dropUserType(entity), true);
break;
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,41 +13,39 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.lang.annotation.Annotation;
import javax.validation.ConstraintValidator;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.support.CasserException;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException;
public enum CasserValidator implements PropertyValueValidator {
public enum HelenusValidator implements PropertyValueValidator {
INSTANCE;
public void validate(CasserProperty prop, Object value) {
public void validate(HelenusProperty prop, Object value) {
for (ConstraintValidator<? extends Annotation, ?> validator : prop.getValidators()) {
ConstraintValidator typeless = (ConstraintValidator) validator;
boolean valid = false;
try {
valid = typeless.isValid(value, null);
} catch (ClassCastException e) {
throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
}
catch(ClassCastException e) {
throw new CasserMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
}
if (!valid) {
throw new CasserException("wrong value '" + value + "' for " + prop);
throw new HelenusException("wrong value '" + value + "' for " + prop);
}
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,12 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Map;
public interface MapperInstantiator {
<E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
<E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
}

View file

@ -0,0 +1,187 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core;
import java.util.function.Function;
import com.datastax.driver.core.Row;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.Fun;
/**
 * Row-to-tuple mapping functions used when a query selects 1..7 individual
 * columns instead of a whole mapped entity. Each MapperN reads N column
 * values from a driver {@link Row} (by positional index 0..N-1) through a
 * {@link ColumnValueProvider} and packs them into the matching Fun.TupleN.
 *
 * Instances are created with {@link HelenusPropertyNode}s but retain only the
 * underlying {@link HelenusProperty}, which carries the type information the
 * provider needs to convert each column value.
 */
public final class Mappers {

	// Static holder for the Mapper1..Mapper7 classes; never instantiated.
	private Mappers() {
	}

	/** Maps column 0 of a row into a 1-tuple. */
	public final static class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1;

		public Mapper1(ColumnValueProvider provider, HelenusPropertyNode p1) {
			this.provider = provider;
			this.p1 = p1.getProperty();
		}

		@Override
		public Fun.Tuple1<A> apply(Row row) {
			return new Fun.Tuple1<A>(provider.getColumnValue(row, 0, p1));
		}
	}

	/** Maps columns 0..1 of a row into a 2-tuple. */
	public final static class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1;
		private final HelenusProperty p2;

		public Mapper2(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
		}

		@Override
		public Fun.Tuple2<A, B> apply(Row row) {
			return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
		}
	}

	/** Maps columns 0..2 of a row into a 3-tuple. */
	public final static class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1;
		private final HelenusProperty p2;
		private final HelenusProperty p3;

		public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
				HelenusPropertyNode p3) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
			this.p3 = p3.getProperty();
		}

		@Override
		public Fun.Tuple3<A, B, C> apply(Row row) {
			return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
					provider.getColumnValue(row, 2, p3));
		}
	}

	/** Maps columns 0..3 of a row into a 4-tuple. */
	public final static class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1;
		private final HelenusProperty p2;
		private final HelenusProperty p3;
		private final HelenusProperty p4;

		public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
				HelenusPropertyNode p3, HelenusPropertyNode p4) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
			this.p3 = p3.getProperty();
			this.p4 = p4.getProperty();
		}

		@Override
		public Fun.Tuple4<A, B, C, D> apply(Row row) {
			return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
					provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
		}
	}

	/** Maps columns 0..4 of a row into a 5-tuple. */
	public final static class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1, p2, p3, p4, p5;

		public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
				HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
			this.p3 = p3.getProperty();
			this.p4 = p4.getProperty();
			this.p5 = p5.getProperty();
		}

		@Override
		public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
			return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
					provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
					provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
		}
	}

	/** Maps columns 0..5 of a row into a 6-tuple. */
	public final static class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1, p2, p3, p4, p5, p6;

		public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
				HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
			this.p3 = p3.getProperty();
			this.p4 = p4.getProperty();
			this.p5 = p5.getProperty();
			this.p6 = p6.getProperty();
		}

		@Override
		public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
			return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
					provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
					provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
					provider.getColumnValue(row, 5, p6));
		}
	}

	/** Maps columns 0..6 of a row into a 7-tuple. */
	public final static class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

		private final ColumnValueProvider provider;
		private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

		public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
				HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
				HelenusPropertyNode p7) {
			this.provider = provider;
			this.p1 = p1.getProperty();
			this.p2 = p2.getProperty();
			this.p3 = p3.getProperty();
			this.p4 = p4.getProperty();
			this.p5 = p5.getProperty();
			this.p6 = p6.getProperty();
			this.p7 = p7.getProperty();
		}

		@Override
		public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
			return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
					provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
					provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
					provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
		}
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.HashMap;
import java.util.Map;
@ -21,7 +21,7 @@ import java.util.Map;
public enum Operator {
EQ("=="),
IN("in"),
GT(">"),
@ -31,17 +31,17 @@ public enum Operator {
GTE(">="),
LTE("<=");
private final String name;
private final static Map<String, Operator> indexByName = new HashMap<String, Operator>();
static {
for (Operator fo : Operator.values()) {
indexByName.put(fo.getName(), fo);
}
}
private Operator(String name) {
this.name = name;
}
@ -49,9 +49,9 @@ public enum Operator {
public String getName() {
return name;
}
public static Operator findByOperator(String name) {
return indexByName.get(name);
}
}

View file

@ -1,14 +1,15 @@
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.Ordering;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.ColumnType;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.OrderingDirection;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.OrderingDirection;
import net.helenus.support.HelenusMappingException;
public final class Ordered {
@ -19,29 +20,30 @@ public final class Ordered {
this.getter = getter;
this.direction = direction;
}
public Ordering getOrdering() {
Objects.requireNonNull(getter, "property is null");
Objects.requireNonNull(direction, "direction is null");
CasserPropertyNode propNode = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode propNode = MappingUtil.resolveMappingProperty(getter);
if (propNode.getProperty().getColumnType() != ColumnType.CLUSTERING_COLUMN) {
throw new CasserMappingException("property must be a clustering column " + propNode.getProperty().getPropertyName());
throw new HelenusMappingException(
"property must be a clustering column " + propNode.getProperty().getPropertyName());
}
switch(direction) {
case ASC:
switch (direction) {
case ASC :
return QueryBuilder.asc(propNode.getColumnName());
case DESC:
case DESC :
return QueryBuilder.desc(propNode.getColumnName());
}
throw new CasserMappingException("invalid direction " + direction);
throw new HelenusMappingException("invalid direction " + direction);
}
}

View file

@ -0,0 +1,102 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.support.HelenusMappingException;
/**
 * An immutable filter predicate: a comparison {@link Operator} together with
 * the value(s) it compares against. A postulate is turned into a concrete
 * driver {@code Clause} for a specific column via {@link #getClause}.
 *
 * @param <V> the Java type of the compared value(s)
 */
public final class Postulate<V> {

	private final Operator operator;
	// For IN the whole array is used; for every other operator only values[0] is read.
	private final V[] values;

	protected Postulate(Operator op, V[] values) {
		this.operator = op;
		this.values = values;
	}

	/** Factory method; varargs mirror the single-vs-many semantics of the operators. */
	public static <V> Postulate<V> of(Operator op, V... values) {
		return new Postulate<V>(op, values);
	}

	/**
	 * Builds the QueryBuilder clause for this postulate applied to the given
	 * column. Each raw value is first converted by the {@code valuePreparer}
	 * into its driver-level representation.
	 *
	 * @throws HelenusMappingException if the operator is not one of EQ/IN/LT/LTE/GT/GTE
	 */
	public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {
		switch (operator) {
			case EQ :
				return QueryBuilder.eq(node.getColumnName(),
						valuePreparer.prepareColumnValue(values[0], node.getProperty()));
			case IN :
				Object[] preparedValues = new Object[values.length];
				for (int i = 0; i != values.length; ++i) {
					preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
				}
				return QueryBuilder.in(node.getColumnName(), preparedValues);
			case LT :
				return QueryBuilder.lt(node.getColumnName(),
						valuePreparer.prepareColumnValue(values[0], node.getProperty()));
			case LTE :
				return QueryBuilder.lte(node.getColumnName(),
						valuePreparer.prepareColumnValue(values[0], node.getProperty()));
			case GT :
				return QueryBuilder.gt(node.getColumnName(),
						valuePreparer.prepareColumnValue(values[0], node.getProperty()));
			case GTE :
				return QueryBuilder.gte(node.getColumnName(),
						valuePreparer.prepareColumnValue(values[0], node.getProperty()));
			default :
				throw new HelenusMappingException("unknown filter operation " + operator);
		}
	}

	/**
	 * Human-readable form: "in(a, b, c)" for IN, otherwise operator name
	 * directly followed by the single value (e.g. "==5").
	 * NOTE(review): non-IN operators assume values is non-null and non-empty;
	 * the of(...) factory always supplies at least a varargs array.
	 */
	@Override
	public String toString() {

		if (operator == Operator.IN) {

			if (values == null) {
				return "in()";
			}

			int len = values.length;
			StringBuilder b = new StringBuilder();
			b.append("in(");
			for (int i = 0; i != len; i++) {
				// b already holds "in(" (length 3); anything longer means a prior element.
				if (b.length() > 3) {
					b.append(", ");
				}
				b.append(String.valueOf(values[i]));
			}
			return b.append(')').toString();
		}

		return operator.getName() + values[0];
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,12 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import com.noorq.casser.mapping.CasserProperty;
import net.helenus.mapping.HelenusProperty;
public interface PropertyValueValidator {
void validate(CasserProperty prop, Object value);
void validate(HelenusProperty prop, Object value);
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.List;
import java.util.Map;
@ -21,10 +21,11 @@ import java.util.Objects;
import com.datastax.driver.core.querybuilder.BindMarker;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.noorq.casser.mapping.OrderingDirection;
import net.helenus.mapping.OrderingDirection;
/**
* Sugar methods for the queries
* Sugar methods for the queries
*
*/
@ -34,11 +35,11 @@ public final class Query {
}
public static BindMarker marker() {
return QueryBuilder.bindMarker();
return QueryBuilder.bindMarker();
}
public static BindMarker marker(String name) {
return QueryBuilder.bindMarker(name);
return QueryBuilder.bindMarker(name);
}
public static Ordered asc(Getter<?> getter) {
@ -52,19 +53,19 @@ public final class Query {
public static <V> Postulate<V> eq(V val) {
return Postulate.of(Operator.EQ, val);
}
public static <V> Postulate<V> lt(V val) {
return Postulate.of(Operator.LT, val);
}
public static <V> Postulate<V> lte(V val) {
return Postulate.of(Operator.LTE, val);
}
public static <V> Postulate<V> gt(V val) {
return Postulate.of(Operator.GT, val);
}
public static <V> Postulate<V> gte(V val) {
return Postulate.of(Operator.GTE, val);
}
@ -72,34 +73,32 @@ public final class Query {
public static <V> Postulate<V> in(V[] vals) {
return new Postulate<V>(Operator.IN, vals);
}
public static <K,V> Getter<V> getIdx(Getter<List<V>> listGetter, int index) {
Objects.requireNonNull(listGetter, "listGetter is null");
return new Getter<V>() {
public static <K, V> Getter<V> getIdx(Getter<List<V>> listGetter, int index) {
Objects.requireNonNull(listGetter, "listGetter is null");
return new Getter<V>() {
@Override
public V get() {
return listGetter.get().get(index);
}
};
}
public static <K, V> Getter<V> get(Getter<Map<K, V>> mapGetter, K k) {
Objects.requireNonNull(mapGetter, "mapGetter is null");
Objects.requireNonNull(k, "key is null");
return new Getter<V>() {
};
}
public static <K, V> Getter<V> get(Getter<Map<K, V>> mapGetter, K k) {
Objects.requireNonNull(mapGetter, "mapGetter is null");
Objects.requireNonNull(k, "key is null");
return new Getter<V>() {
@Override
public V get() {
return mapGetter.get().get(k);
}
};
}
};
}
}

View file

@ -0,0 +1,365 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.core;
import java.util.*;
import java.util.stream.Collectors;
import com.datastax.driver.core.*;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.schemabuilder.*;
import com.datastax.driver.core.schemabuilder.Create.Options;
import net.helenus.mapping.*;
import net.helenus.mapping.ColumnType;
import net.helenus.mapping.type.OptionalColumnMetadata;
import net.helenus.support.CqlUtil;
import net.helenus.support.HelenusMappingException;
/**
 * Factory for CQL DDL statements (create/alter/drop of tables, user-defined
 * types and secondary indexes) derived from Helenus entity metadata.
 *
 * All methods are static; the "alter" variants diff the mapped entity against
 * live cluster metadata ({@code TableMetadata}/{@code UserType}) and return
 * only the statements needed to reconcile the two.
 */
public final class SchemaUtil {

	// Static utility class; never instantiated.
	private SchemaUtil() {
	}

	/**
	 * Builds a {@code USE <keyspace>} statement, optionally double-quoting the
	 * keyspace name to preserve case sensitivity.
	 */
	public static RegularStatement use(String keyspace, boolean forceQuote) {
		if (forceQuote) {
			// BUGFIX: a space is required between USE and the quoted name;
			// the previous "USE" + quoted produced invalid CQL such as USE"ks".
			return new SimpleStatement("USE " + CqlUtil.forceQuote(keyspace));
		} else {
			return new SimpleStatement("USE " + keyspace);
		}
	}

	/**
	 * Builds CREATE TYPE for a UDT entity. Primary-key column types are
	 * rejected because CQL user-defined types cannot declare keys.
	 *
	 * @throws HelenusMappingException if the entity is not a UDT or a column is invalid
	 */
	public static SchemaStatement createUserType(HelenusEntity entity) {

		if (entity.getType() != HelenusEntityType.UDT) {
			throw new HelenusMappingException("expected UDT entity " + entity);
		}

		CreateType create = SchemaBuilder.createType(entity.getName().toCql());

		for (HelenusProperty prop : entity.getOrderedProperties()) {

			ColumnType columnType = prop.getColumnType();

			if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
				throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
						+ prop.getPropertyName() + " in entity " + entity);
			}

			try {
				prop.getDataType().addColumn(create, prop.getColumnName());
			} catch (IllegalArgumentException e) {
				throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
						+ entity.getName().getName() + "'", e);
			}
		}

		return create;
	}

	/**
	 * Diffs a UDT entity against the live {@code UserType} and returns ALTER
	 * statements for added/changed fields; when {@code dropUnusedColumns} is
	 * set, also drops fields present in the cluster but absent from the entity.
	 */
	public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
			boolean dropUnusedColumns) {

		if (entity.getType() != HelenusEntityType.UDT) {
			throw new HelenusMappingException("expected UDT entity " + entity);
		}

		List<SchemaStatement> result = new ArrayList<SchemaStatement>();

		/**
		 * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
		 * when it will exist
		 */
		Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

		final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

		for (HelenusProperty prop : entity.getOrderedProperties()) {

			String columnName = prop.getColumnName().getName();

			if (dropUnusedColumns) {
				visitedColumns.add(columnName);
			}

			ColumnType columnType = prop.getColumnType();

			// Primary-key columns can never be altered.
			if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
				continue;
			}

			DataType dataType = userType.getFieldType(columnName);
			SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
					optional(columnName, dataType));

			if (stmt != null) {
				result.add(stmt);
			}
		}

		if (dropUnusedColumns) {
			for (String field : userType.getFieldNames()) {
				if (!visitedColumns.contains(field)) {
					result.add(alter.dropColumn(field));
				}
			}
		}

		return result;
	}

	/** Builds DROP TYPE IF EXISTS for a UDT entity. */
	public static SchemaStatement dropUserType(HelenusEntity entity) {

		if (entity.getType() != HelenusEntityType.UDT) {
			throw new HelenusMappingException("expected UDT entity " + entity);
		}

		return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
	}

	/** Builds DROP TYPE IF EXISTS from live cluster metadata. */
	public static SchemaStatement dropUserType(UserType type) {
		return SchemaBuilder.dropType(type.getTypeName()).ifExists();
	}

	/**
	 * Builds CREATE TABLE IF NOT EXISTS for a table entity, including the
	 * clustering-order options for any clustering columns.
	 */
	public static SchemaStatement createTable(HelenusEntity entity) {

		if (entity.getType() != HelenusEntityType.TABLE) {
			throw new HelenusMappingException("expected table entity " + entity);
		}

		// NOTE: There is a bug in the normal path of createTable where the
		// "cache" is set too early and never unset preventing more than
		// one column on a table.
		// SchemaBuilder.createTable(entity.getName().toCql());
		CreateTable create = new CreateTable(entity.getName().toCql());

		create.ifNotExists();

		List<HelenusProperty> clusteringColumns = new ArrayList<HelenusProperty>();

		for (HelenusProperty prop : entity.getOrderedProperties()) {
			ColumnType columnType = prop.getColumnType();
			if (columnType == ColumnType.CLUSTERING_COLUMN) {
				clusteringColumns.add(prop);
			}
			prop.getDataType().addColumn(create, prop.getColumnName());
		}

		if (!clusteringColumns.isEmpty()) {
			Options options = create.withOptions();
			clusteringColumns
					.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
		}

		return create;
	}

	/**
	 * Diffs a table entity against live {@code TableMetadata} and returns the
	 * ALTER statements needed; when {@code dropUnusedColumns} is set, also
	 * drops columns present in the cluster but absent from the entity.
	 */
	public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {

		if (entity.getType() != HelenusEntityType.TABLE) {
			throw new HelenusMappingException("expected table entity " + entity);
		}

		List<SchemaStatement> result = new ArrayList<SchemaStatement>();

		Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

		final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

		for (HelenusProperty prop : entity.getOrderedProperties()) {

			String columnName = prop.getColumnName().getName();

			if (dropUnusedColumns) {
				visitedColumns.add(columnName);
			}

			ColumnType columnType = prop.getColumnType();

			// Primary-key columns can never be altered.
			if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
				continue;
			}

			ColumnMetadata columnMetadata = tmd.getColumn(columnName);
			SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
					optional(columnMetadata));

			if (stmt != null) {
				result.add(stmt);
			}
		}

		if (dropUnusedColumns) {
			for (ColumnMetadata cm : tmd.getColumns()) {
				if (!visitedColumns.contains(cm.getName())) {
					result.add(alter.dropColumn(cm.getName()));
				}
			}
		}

		return result;
	}

	/** Builds DROP TABLE IF EXISTS for a table entity. */
	public static SchemaStatement dropTable(HelenusEntity entity) {

		if (entity.getType() != HelenusEntityType.TABLE) {
			throw new HelenusMappingException("expected table entity " + entity);
		}

		return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
	}

	/**
	 * Builds CREATE INDEX IF NOT EXISTS for an indexed property: a regular
	 * secondary index for case-sensitive columns, a SASI index otherwise
	 * (SASI supports case-insensitive matching).
	 */
	public static SchemaStatement createIndex(HelenusProperty prop) {
		if (prop.caseSensitiveIndex()) {
			return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
					.ifNotExists()
					.onTable(prop.getEntity().getName().toCql())
					.andColumn(prop.getColumnName().toCql());
		} else {
			return new CreateSasiIndex(prop.getIndexName().get().toCql())
					.ifNotExists()
					.onTable(prop.getEntity().getName().toCql())
					.andColumn(prop.getColumnName().toCql());
		}
	}

	/** Builds CREATE INDEX statements for every indexed property of the entity. */
	public static List<SchemaStatement> createIndexes(HelenusEntity entity) {

		return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent())
				.map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList());
	}

	/**
	 * Diffs the entity's indexed properties against live {@code TableMetadata}
	 * and returns CREATE INDEX statements for missing indexes; when
	 * {@code dropUnusedIndexes} is set, also drops indexes on columns the
	 * entity no longer declares as indexed.
	 */
	public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity,
			boolean dropUnusedIndexes) {

		List<SchemaStatement> list = new ArrayList<SchemaStatement>();

		final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();

		entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {

			String columnName = p.getColumnName().getName();

			if (dropUnusedIndexes) {
				visitedColumns.add(columnName);
			}

			ColumnMetadata cm = tmd.getColumn(columnName);
			if (cm != null) {
				IndexMetadata im = tmd.getIndex(columnName);
				if (im == null) {
					list.add(createIndex(p));
				}
			} else {
				// Column does not exist yet; emit the index anyway so it is
				// created after the matching alterTable adds the column.
				list.add(createIndex(p));
			}
		});

		if (dropUnusedIndexes) {
			tmd.getColumns().stream()
					.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
					.forEach(c -> {
						list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
					});
		}

		return list;
	}

	/** Builds DROP INDEX IF EXISTS for an indexed property. */
	public static SchemaStatement dropIndex(HelenusProperty prop) {
		return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
	}

	/** Translates the mapping-level ordering enum to the SchemaBuilder enum. */
	private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
		switch (o) {
			case ASC :
				return SchemaBuilder.Direction.ASC;
			case DESC :
				return SchemaBuilder.Direction.DESC;
		}
		throw new HelenusMappingException("unknown ordering " + o);
	}

	/** Throws the standard "unsupported property type" mapping error. */
	public static void throwNoMapping(HelenusProperty prop) {

		throw new HelenusMappingException(
				"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
						+ prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity "
						+ prop.getEntity());
	}

	// Adapts a (possibly null) driver ColumnMetadata to the nullable
	// OptionalColumnMetadata view used by the data-type diffing code.
	private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
		if (columnMetadata != null) {
			return new OptionalColumnMetadata() {

				@Override
				public String getName() {
					return columnMetadata.getName();
				}

				@Override
				public DataType getType() {
					return columnMetadata.getType();
				}
			};
		}
		return null;
	}

	// Same adaptation for a UDT field, where only the name and DataType are known.
	private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
		if (dataType != null) {
			return new OptionalColumnMetadata() {

				@Override
				public String getName() {
					return name;
				}

				@Override
				public DataType getType() {
					return dataType;
				}
			};
		}
		return null;
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,56 +13,52 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.datastax.driver.core.KeyspaceMetadata;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.UserType;
import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserEntityType;
import com.noorq.casser.mapping.value.ColumnValuePreparer;
import com.noorq.casser.mapping.value.ColumnValueProvider;
import com.noorq.casser.support.CasserException;
import com.noorq.casser.support.PackageUtil;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations {
private final Session session;
private CodecRegistry registry;
private String usingKeyspace;
private boolean showCql = false;
private PrintStream printStream = System.out;
private Executor executor = MoreExecutors.sameThreadExecutor();
private SessionRepositoryBuilder sessionRepository = new SessionRepositoryBuilder();
private SessionRepositoryBuilder sessionRepository;
private boolean dropUnusedColumns = false;
private boolean dropUnusedIndexes = false;
private KeyspaceMetadata keyspaceMetadata;
private final List<Object> initList = new ArrayList<Object>();
private AutoDdl autoDdl = AutoDdl.UPDATE;
SessionInitializer(Session session) {
this.session = Objects.requireNonNull(session, "empty session");
this.usingKeyspace = session.getLoggedKeyspace(); // can be null
this.sessionRepository = new SessionRepositoryBuilder(session);
}
@Override
public Session currentSession() {
return session;
@ -72,7 +68,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
public String usingKeyspace() {
return usingKeyspace;
}
@Override
public Executor getExecutor() {
return executor;
@ -80,24 +76,24 @@ public final class SessionInitializer extends AbstractSessionOperations {
@Override
public SessionRepository getSessionRepository() {
throw new CasserException("not expected to call");
throw new HelenusException("not expected to call");
}
@Override
public ColumnValueProvider getValueProvider() {
throw new CasserException("not expected to call");
throw new HelenusException("not expected to call");
}
@Override
public ColumnValuePreparer getValuePreparer() {
throw new CasserException("not expected to call");
throw new HelenusException("not expected to call");
}
public SessionInitializer showCql() {
this.showCql = true;
return this;
}
public SessionInitializer showCql(boolean enabled) {
this.showCql = enabled;
return this;
@ -112,7 +108,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
this.printStream = out;
return this;
}
public SessionInitializer withExecutor(Executor executor) {
Objects.requireNonNull(executor, "empty executor");
this.executor = executor;
@ -134,23 +130,26 @@ public final class SessionInitializer extends AbstractSessionOperations {
return this;
}
@Override
public SessionInitializer withCodecRegistry(CodecRegistry registry) {
this.registry = registry;
return this;
}
@Override
public boolean isShowCql() {
return showCql;
}
public SessionInitializer addPackage(String packageName) {
try {
PackageUtil.getClasses(packageName)
.stream()
.filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(initList::add);
PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(initList::add);
} catch (ClassNotFoundException e) {
throw new CasserException("fail to add package " + packageName, e);
throw new HelenusException("fail to add package " + packageName, e);
}
return this;
}
public SessionInitializer add(Object... dsls) {
Objects.requireNonNull(dsls, "dsls is empty");
int len = dsls.length;
@ -160,7 +159,7 @@ public final class SessionInitializer extends AbstractSessionOperations {
}
return this;
}
public SessionInitializer autoValidate() {
this.autoDdl = AutoDdl.VALIDATE;
return this;
@ -185,132 +184,151 @@ public final class SessionInitializer extends AbstractSessionOperations {
this.autoDdl = autoDdl;
return this;
}
public SessionInitializer use(String keyspace) {
session.execute(SchemaUtil.use(keyspace, false));
this.usingKeyspace = keyspace;
return this;
}
public SessionInitializer use(String keyspace, boolean forceQuote) {
session.execute(SchemaUtil.use(keyspace, forceQuote));
this.usingKeyspace = keyspace;
return this;
}
public void singleton() {
Casser.setSession(get());
Helenus.setSession(get());
}
public synchronized CasserSession get() {
public synchronized HelenusSession get() {
initialize();
return new CasserSession(session,
usingKeyspace,
showCql,
printStream,
sessionRepository,
executor,
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor,
autoDdl == AutoDdl.CREATE_DROP);
}
private void initialize() {
Objects.requireNonNull(usingKeyspace, "please define keyspace by 'use' operator");
initList.forEach(dsl -> sessionRepository.add(dsl));
TableOperations tableOps = new TableOperations(this, dropUnusedColumns, dropUnusedIndexes);
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
switch(autoDdl) {
case CREATE:
case CREATE_DROP:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
sessionRepository.entities().stream().filter(e -> e.getType() == CasserEntityType.TABLE)
.forEach(e -> tableOps.createTable(e));
break;
case VALIDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
sessionRepository.entities().stream().filter(e -> e.getType() == CasserEntityType.TABLE)
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
break;
case UPDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
switch (autoDdl) {
case CREATE_DROP :
// Drop tables first, otherwise a `DROP TYPE ...` will fail as the type is still referenced
// by a table.
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.dropTable(e));
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is intentional!)
case CREATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.createTable(e));
break;
case VALIDATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
break;
case UPDATE :
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
break;
sessionRepository.entities().stream().filter(e -> e.getType() == CasserEntityType.TABLE)
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
break;
}
KeyspaceMetadata km = getKeyspaceMetadata();
for (UserType userType : km.getUserTypes()) {
sessionRepository.addUserType(userType.getTypeName(), userType);
}
}
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super CasserEntity> action) {
Set<CasserEntity> processedSet = new HashSet<CasserEntity>();
Set<CasserEntity> stack = new HashSet<CasserEntity>();
sessionRepository.entities().stream()
.filter(e -> e.getType() == CasserEntityType.UDT)
.forEach(e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
}
private void eachUserTypeInRecursion(CasserEntity e, Set<CasserEntity> processedSet, Set<CasserEntity> stack, UserTypeOperations userTypeOps, Consumer<? super CasserEntity> action) {
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
}
private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
deque.stream().forEach(e -> {action.accept(e); });
/*
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
sessionRepository.entities().stream()
.filter(e -> e.getType() == HelenusEntityType.UDT)
.collect(Collectors.toCollection(ArrayDeque::new))
.descendingIterator()
.forEachRemaining(e -> {
stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
});
*/
}
private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack,
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
stack.add(e);
Collection<CasserEntity> createBefore = sessionRepository.getUserTypeUses(e);
for (CasserEntity be : createBefore) {
Collection<HelenusEntity> createBefore = sessionRepository.getUserTypeUses(e);
for (HelenusEntity be : createBefore) {
if (!processedSet.contains(be) && !stack.contains(be)) {
eachUserTypeInRecursion(be, processedSet, stack, userTypeOps, action);
processedSet.add(be);
}
}
if (!processedSet.contains(e)) {
action.accept(e);
processedSet.add(e);
}
}
private KeyspaceMetadata getKeyspaceMetadata() {
if (keyspaceMetadata == null) {
keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
}
return keyspaceMetadata;
}
private TableMetadata getTableMetadata(CasserEntity entity) {
private TableMetadata getTableMetadata(HelenusEntity entity) {
return getKeyspaceMetadata().getTable(entity.getName().getName());
}
private UserType getUserType(CasserEntity entity) {
private UserType getUserType(HelenusEntity entity) {
return getKeyspaceMetadata().getUserType(entity.getName().getName());
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,37 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Collection;
import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap;
import com.noorq.casser.mapping.CasserEntity;
import net.helenus.mapping.HelenusEntity;
public final class SessionRepository {
private final ImmutableMap<String, UserType> userTypeMap;
private final ImmutableMap<Class<?>, CasserEntity> entityMap;
private final ImmutableMap<Class<?>, HelenusEntity> entityMap;
public SessionRepository(SessionRepositoryBuilder builder) {
userTypeMap = ImmutableMap.<String, UserType>builder()
.putAll(builder.getUserTypeMap())
.build();
entityMap = ImmutableMap.<Class<?>, CasserEntity>builder()
.putAll(builder.getEntityMap())
.build();
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
}
public UserType findUserType(String name) {
return userTypeMap.get(name.toLowerCase());
}
public Collection<CasserEntity> entities() {
public Collection<HelenusEntity> entities() {
return entityMap.values();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,47 +13,56 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserEntityType;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.type.AbstractDataType;
import com.noorq.casser.mapping.type.DTDataType;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.type.AbstractDataType;
import net.helenus.mapping.type.DTDataType;
import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder {
private static final Optional<CasserEntityType> OPTIONAL_UDT = Optional.of(CasserEntityType.UDT);
private final Map<Class<?>, CasserEntity> entityMap = new HashMap<Class<?>, CasserEntity>();
private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT);
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
private final Map<String, UserType> userTypeMap = new HashMap<String, UserType>();
private final Multimap<CasserEntity, CasserEntity> userTypeUsesMap = HashMultimap.create();
private final Multimap<HelenusEntity, HelenusEntity> userTypeUsesMap = HashMultimap.create();
private final Session session;
SessionRepositoryBuilder(Session session) {
this.session = session;
}
public SessionRepository build() {
return new SessionRepository(this);
}
public Collection<CasserEntity> getUserTypeUses(CasserEntity udtName) {
public Collection<HelenusEntity> getUserTypeUses(HelenusEntity udtName) {
return userTypeUsesMap.get(udtName);
}
public Collection<CasserEntity> entities() {
public Collection<HelenusEntity> entities() {
return entityMap.values();
}
protected Map<Class<?>, CasserEntity> getEntityMap() {
protected Map<Class<?>, HelenusEntity> getEntityMap() {
return entityMap;
}
@ -65,80 +74,79 @@ public final class SessionRepositoryBuilder {
userTypeMap.putIfAbsent(name.toLowerCase(), userType);
}
public CasserEntity add(Object dsl) {
public HelenusEntity add(Object dsl) {
return add(dsl, Optional.empty());
}
public void addEntity(CasserEntity entity) {
CasserEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
public void addEntity(HelenusEntity entity) {
HelenusEntity concurrentEntity = entityMap.putIfAbsent(entity.getMappingInterface(), entity);
if (concurrentEntity == null) {
addUserDefinedTypes(entity.getOrderedProperties());
}
}
public CasserEntity add(Object dsl, Optional<CasserEntityType> type) {
CasserEntity casserEntity = Casser.resolve(dsl);
Class<?> iface = casserEntity.getMappingInterface();
CasserEntity entity = entityMap.get(iface);
}
public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
HelenusEntity helenusEntity = Helenus.resolve(dsl, session.getCluster().getMetadata());
Class<?> iface = helenusEntity.getMappingInterface();
HelenusEntity entity = entityMap.get(iface);
if (entity == null) {
entity = casserEntity;
entity = helenusEntity;
if (type.isPresent() && entity.getType() != type.get()) {
throw new CasserMappingException("unexpected entity type " + entity.getType() + " for " + entity);
throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity);
}
CasserEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
if (concurrentEntity == null) {
addUserDefinedTypes(entity.getOrderedProperties());
}
else {
} else {
entity = concurrentEntity;
}
}
return entity;
}
private void addUserDefinedTypes(Collection<CasserProperty> props) {
for (CasserProperty prop : props) {
private void addUserDefinedTypes(Collection<HelenusProperty> props) {
for (HelenusProperty prop : props) {
AbstractDataType type = prop.getDataType();
if (type instanceof DTDataType) {
continue;
}
if (!UDTValue.class.isAssignableFrom(prop.getJavaType())) {
for (Class<?> udtClass : type.getTypeArguments()) {
if (UDTValue.class.isAssignableFrom(udtClass)) {
continue;
}
CasserEntity addedUserType = add(udtClass, OPTIONAL_UDT);
if (CasserEntityType.UDT == prop.getEntity().getType()) {
HelenusEntity addedUserType = add(udtClass, OPTIONAL_UDT);
if (HelenusEntityType.UDT == prop.getEntity().getType()) {
userTypeUsesMap.put(prop.getEntity(), addedUserType);
}
}
}
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,67 +13,76 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.support.CasserException;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
public final class TableOperations {
private final AbstractSessionOperations sessionOps;
private final boolean dropUnusedColumns;
private final boolean dropUnusedIndexes;
public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns;
this.dropUnusedIndexes = dropUnusedIndexes;
}
public void createTable(CasserEntity entity) {
public void createTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createTable(entity), true);
executeBatch(SchemaUtil.createIndexes(entity));
}
public void validateTable(TableMetadata tmd, CasserEntity entity) {
public void dropTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropTable(entity), true);
}
public void validateTable(TableMetadata tmd, HelenusEntity entity) {
if (tmd == null) {
throw new CasserException("table not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
throw new HelenusException(
"table not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
}
List<SchemaStatement> list = SchemaUtil.alterTable(tmd, entity, dropUnusedColumns);
list.addAll(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
if (!list.isEmpty()) {
throw new CasserException("schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
}
}
public void updateTable(TableMetadata tmd, CasserEntity entity) {
public void updateTable(TableMetadata tmd, HelenusEntity entity) {
if (tmd == null) {
createTable(entity);
return;
}
executeBatch(SchemaUtil.alterTable(tmd, entity, dropUnusedColumns));
executeBatch(SchemaUtil.alterIndexes(tmd, entity, dropUnusedIndexes));
}
private void executeBatch(List<SchemaStatement> list) {
list.forEach(s -> {
sessionOps.execute(s, true);
});
}
}

View file

@ -0,0 +1,60 @@
package net.helenus.core;
import java.util.ArrayList;
/**
 * Encapsulates the concept of a "transaction" as a unit-of-work.
 *
 * <p>A unit-of-work may spawn nested units via {@link #begin()}; commit/abort
 * logic is still work-in-progress (see the commented-out log/cache hooks below).
 */
public class UnitOfWork {

	// Session this unit-of-work operates against.
	private final HelenusSession session;
	// Child units created by begin(); allocated lazily on first use.
	private ArrayList<UnitOfWork> nested;

	UnitOfWork(HelenusSession session) {
		this.session = session;
		// log.record(txn::start)
	}

	/**
	 * Marks the beginning of a transactional section of work. Will write a record
	 * to the shared write-ahead log.
	 *
	 * @return the handle used to commit or abort the work.
	 */
	public UnitOfWork begin() {
		final UnitOfWork child = new UnitOfWork(session);
		if (nested == null) {
			nested = new ArrayList<UnitOfWork>();
		}
		nested.add(child);
		return child;
	}

	/**
	 * Checks to see if the work performed between calling begin and now can be
	 * committed or not.
	 *
	 * @throws ConflictingUnitOfWorkException
	 *             when the work overlaps with other concurrent writers.
	 */
	public void commit() throws ConflictingUnitOfWorkException {
		// nested.foreach.commit()
		// log.record(txn::provisionalCommit)
		// examine log for conflicts in read-set and write-set between begin and
		// provisional commit
		// if (conflict) { throw new ConflictingUnitOfWorkException(this) }
	}

	/**
	 * Explicitly discard the work and mark it as as such in the log.
	 */
	public void abort() {
		// log.record(txn::abort)
		// cache.invalidateSince(txn::start time)
	}

	/** Describes why a commit conflicted; placeholder until conflict detection lands. */
	public String describeConflicts() {
		return "it's complex...";
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,63 +13,71 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core;
package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.support.CasserException;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException;
public final class UserTypeOperations {
private final AbstractSessionOperations sessionOps;
private final boolean dropUnusedColumns;
public UserTypeOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns) {
this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns;
}
public void createUserType(CasserEntity entity) {
public void createUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createUserType(entity), true);
}
public void validateUserType(UserType userType, CasserEntity entity) {
public void dropUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropUserType(entity), true);
}
public void validateUserType(UserType userType, HelenusEntity entity) {
if (userType == null) {
throw new CasserException("userType not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
throw new HelenusException(
"userType not exists " + entity.getName() + "for entity " + entity.getMappingInterface());
}
List<SchemaStatement> list = SchemaUtil.alterUserType(userType, entity, dropUnusedColumns);
if (!list.isEmpty()) {
throw new CasserException("schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list);
}
}
public void updateUserType(UserType userType, CasserEntity entity) {
public void updateUserType(UserType userType, HelenusEntity entity) {
if (userType == null) {
createUserType(entity);
return;
}
executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns));
}
private void executeBatch(List<SchemaStatement> list) {
list.forEach(s -> {
sessionOps.execute(s, true);
});
}
}

View file

@ -0,0 +1,11 @@
package net.helenus.core.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation for entity interfaces.
 *
 * <p>Retained at runtime and applicable to types only. Carries no members;
 * presumably flags a type as eligible for the session / unit-of-work scoped
 * caches introduced alongside it — TODO confirm against the cache implementation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Cacheable {
}

View file

@ -0,0 +1,17 @@
package net.helenus.core.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.helenus.core.ConflictingUnitOfWorkException;
/**
 * Marks a method whose invocation should be retried when it fails with one of
 * the listed exception types. Consumed at runtime by
 * {@code RetryConcurrentUnitOfWorkAspect}, which re-invokes the join point up
 * to {@link #times()} times.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Retry {

	/** Exception types that trigger a retry; defaults to unit-of-work conflicts. */
	Class<? extends Exception>[] on() default ConflictingUnitOfWorkException.class;

	/** Maximum number of retries; must be greater than zero. */
	int times() default 3;
}

View file

@ -0,0 +1,83 @@
package net.helenus.core.aspect;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
/**
 * Aspect that retries methods annotated with {@link Retry} when they throw (or
 * wrap) one of the exception types listed in {@code @Retry(on = ...)}.
 *
 * <p>The method is attempted once, then retried up to {@code @Retry(times)}
 * additional times; any non-matching or exhausted failure is rethrown as-is.
 */
@Aspect
public class RetryConcurrentUnitOfWorkAspect {

	private static final Logger log = LoggerFactory.getLogger(RetryConcurrentUnitOfWorkAspect.class);

	// BUG FIX: the pointcut previously named the package "annotations" (plural),
	// but Retry lives in net.helenus.core.annotation, so the advice never fired.
	@Around("@annotation(net.helenus.core.annotation.Retry)")
	public Object retry(ProceedingJoinPoint pjp) throws Throwable {
		Retry retryAnnotation = getRetryAnnotation(pjp);
		return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
	}

	// Plain pass-through invocation of the intercepted method.
	private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
		return pjp.proceed();
	}

	// Validates the annotation's settings, then delegates to the retry loop.
	private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
		int times = retryAnnotation.times();
		Class<? extends Throwable>[] retryOn = retryAnnotation.on();
		Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
		Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
		log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
		return tryProceeding(pjp, times, retryOn);
	}

	/**
	 * Invokes the join point, recursing with a decremented budget whenever the
	 * failure (or any of its causes) matches a retryable type.
	 */
	private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn)
			throws Throwable {
		try {
			return proceed(pjp);
		} catch (Throwable throwable) {
			if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
				log.info("Conflict detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
				return tryProceeding(pjp, times, retryOn);
			}
			throw throwable;
		}
	}

	// True when the thrown exception, or any exception in its cause chain,
	// is assignable to one of the retryable types.
	private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
		Throwable[] causes = ExceptionUtils.getThrowables(throwable);
		for (Throwable cause : causes) {
			for (Class<? extends Throwable> retryThrowable : retryOn) {
				if (retryThrowable.isAssignableFrom(cause.getClass())) {
					return true;
				}
			}
		}
		return false;
	}

	/**
	 * Resolves the {@link Retry} annotation for the intercepted method. Falls back
	 * to looking the method up on the target class (by name and runtime argument
	 * types) when the signature's method carries no annotation, e.g. when the
	 * annotation sits on the implementation rather than the interface.
	 *
	 * <p>NOTE(review): the fallback calls {@code getClass()} on each argument and
	 * will NPE on a null argument — confirm callers never pass null, or guard it.
	 */
	private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
		MethodSignature signature = (MethodSignature) pjp.getSignature();
		Method method = signature.getMethod();
		Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
		if (retryAnnotation != null) {
			return retryAnnotation;
		}
		// Use Class<?>[] rather than the raw Class[] to keep the code warning-free.
		Class<?>[] argClasses = new Class<?>[pjp.getArgs().length];
		for (int i = 0; i < pjp.getArgs().length; i++) {
			argClasses[i] = pjp.getArgs()[i].getClass();
		}
		method = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argClasses);
		return AnnotationUtils.findAnnotation(method, Retry.class);
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,40 +13,38 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.LinkedList;
import java.util.List;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.Operator;
import com.noorq.casser.core.Postulate;
import net.helenus.core.*;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>> extends AbstractOperation<E, O> {
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends
AbstractOperation<E, O> {
protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;
public AbstractFilterOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O where(Filter<V> filter) {
addFilter(filter);
@ -55,54 +53,54 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
}
public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O and(Filter<V> filter) {
addFilter(filter);
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter);
return (O) this;
}
private void addFilter(Filter<?> filter) {
if (filters == null) {
filters = new LinkedList<Filter<?>>();
}
filters.add(filter);
}
private void addIfFilter(Filter<?> filter) {
if (ifFilters == null) {
ifFilters = new LinkedList<Filter<?>>();

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,40 +13,38 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.LinkedList;
import java.util.List;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.Operator;
import com.noorq.casser.core.Postulate;
import net.helenus.core.*;
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>> extends AbstractOptionalOperation<E, O> {
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>>
extends
AbstractOptionalOperation<E, O> {
protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;
public AbstractFilterOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O where(Filter<V> filter) {
addFilter(filter);
@ -55,47 +53,47 @@ public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilte
}
public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O and(Filter<V> filter) {
addFilter(filter);
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter);
return (O) this;
}
private void addFilter(Filter<?> filter) {
if (filters == null) {
filters = new LinkedList<Filter<?>>();

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,40 +13,38 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.LinkedList;
import java.util.List;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.Operator;
import com.noorq.casser.core.Postulate;
import net.helenus.core.*;
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>> extends AbstractStreamOperation<E, O> {
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>>
extends
AbstractStreamOperation<E, O> {
protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null;
public AbstractFilterStreamOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public <V> O where(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O where(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O where(Filter<V> filter) {
addFilter(filter);
@ -55,47 +53,47 @@ public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterS
}
public <V> O and(Getter<V> getter, Postulate<V> postulate) {
addFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O and(Getter<V> getter, Operator operator, V val) {
addFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O and(Filter<V> filter) {
addFilter(filter);
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Postulate<V> postulate) {
addIfFilter(Filter.create(getter, postulate));
return (O) this;
}
public <V> O onlyIf(Getter<V> getter, Operator operator, V val) {
addIfFilter(Filter.create(getter, operator, val));
return (O) this;
}
public <V> O onlyIf(Filter<V> filter) {
addIfFilter(filter);
return (O) this;
}
private void addFilter(Filter<?> filter) {
if (filters == null) {
filters = new LinkedList<Filter<?>>();

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,9 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
import scala.concurrent.Future;
package net.helenus.core.operation;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
@ -23,48 +21,61 @@ import com.datastax.driver.core.ResultSetFuture;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.support.Fun;
import com.noorq.casser.support.Scala;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.support.Fun;
import net.helenus.support.Scala;
import scala.concurrent.Future;
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {
public abstract E transform(ResultSet resultSet);
public boolean cacheable() {
return false;
}
public String getCacheKey() {
return "";
}
public AbstractOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public PreparedOperation<E> prepare() {
return new PreparedOperation<E>(prepareStatement(), this);
}
public ListenableFuture<PreparedOperation<E>> prepareAsync() {
final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), new Function<PreparedStatement, PreparedOperation<E>>() {
@Override
public PreparedOperation<E> apply(PreparedStatement preparedStatement) {
return new PreparedOperation<E>(preparedStatement, _this);
}
});
}
public Future<PreparedOperation<E>> prepareFuture() {
return Scala.asFuture(prepareAsync());
}
public E sync() {
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
return transform(resultSet);
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
E result = transform(resultSet);
if (cacheable()) {
sessionOps.cache(getCacheKey(), result);
}
return result;
}
public ListenableFuture<E> async() {
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues);
@ -73,18 +84,22 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
@Override
public E apply(ResultSet resultSet) {
E result = transform(resultSet);
if (cacheable()) {
sessionOps.cache(getCacheKey(), result);
}
return transform(resultSet);
}
}, sessionOps.getExecutor());
return future;
}
public Future<E> future() {
return Scala.asFuture(async());
}
public <A> Future<Fun.Tuple2<E, A>> future(A a) {
return Scala.asFuture(async(), a);
}
@ -100,5 +115,5 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
public <A, B, C, D> Future<Fun.Tuple5<E, A, B, C, D>> future(A a, B b, C c, D d) {
return Scala.asFuture(async(), a, b, c, d);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,81 +13,84 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.Optional;
import scala.None;
import scala.Option;
import scala.Some;
import scala.concurrent.Future;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.support.Fun;
import com.noorq.casser.support.Scala;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>> extends AbstractStatementOperation<E, O> {
import net.helenus.core.AbstractSessionOperations;
import net.helenus.support.Fun;
import net.helenus.support.Scala;
import scala.Option;
import scala.Some;
import scala.concurrent.Future;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends
AbstractStatementOperation<E, O> {
public AbstractOptionalOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public abstract Optional<E> transform(ResultSet resultSet);
public PreparedOptionalOperation<E> prepare() {
return new PreparedOptionalOperation<E>(prepareStatement(), this);
}
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
@Override
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
return new PreparedOptionalOperation<E>(preparedStatement, _this);
}
});
return Futures.transform(prepareStatementAsync(),
new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
@Override
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
return new PreparedOptionalOperation<E>(preparedStatement, _this);
}
});
}
public Future<PreparedOptionalOperation<E>> prepareFuture() {
return Scala.asFuture(prepareAsync());
}
public Optional<E> sync() {
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
return transform(resultSet);
}
public ListenableFuture<Optional<E>> async() {
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<Optional<E>> future = Futures.transform(resultSetFuture, new Function<ResultSet, Optional<E>>() {
ListenableFuture<Optional<E>> future = Futures.transform(resultSetFuture,
new Function<ResultSet, Optional<E>>() {
@Override
public Optional<E> apply(ResultSet resultSet) {
return transform(resultSet);
}
@Override
public Optional<E> apply(ResultSet resultSet) {
return transform(resultSet);
}
}, sessionOps.getExecutor());
}, sessionOps.getExecutor());
return future;
}
public ListenableFuture<Option<E>> asyncForScala() {
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<Option<E>> future = Futures.transform(resultSetFuture, new Function<ResultSet, Option<E>>() {
@ -97,14 +100,13 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
Optional<E> optional = transform(resultSet);
if (optional.isPresent()) {
return new Some<E>(optional.get());
}
else {
} else {
return Option.empty();
}
}
}, sessionOps.getExecutor());
return future;
}
public Future<Option<E>> future() {
@ -126,5 +128,5 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public <A, B, C, D> Future<Fun.Tuple5<Option<E>, A, B, C, D>> future(A a, B b, C c, D d) {
return Scala.asFuture(asyncForScala(), a, b, c, d);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,13 +13,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.concurrent.Future;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
@ -30,18 +28,20 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.support.CasserException;
import com.noorq.casser.support.Scala;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.support.HelenusException;
import net.helenus.support.Scala;
import scala.concurrent.Future;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> {
final Logger logger = LoggerFactory.getLogger(getClass());
protected final AbstractSessionOperations sessionOps;
public abstract Statement buildStatement();
protected boolean showValues = true;
private ConsistencyLevel consistencyLevel;
private ConsistencyLevel serialConsistencyLevel;
@ -49,22 +49,22 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
private boolean enableTracing = false;
private long[] defaultTimestamp = null;
private int[] fetchSize = null;
public AbstractStatementOperation(AbstractSessionOperations sessionOperations) {
this.sessionOps = sessionOperations;
}
public O showValues(boolean enabled) {
this.showValues = enabled;
return (O) this;
}
public O defaultTimestamp(long timestamp) {
this.defaultTimestamp = new long[1];
this.defaultTimestamp[0] = timestamp;
return (O) this;
}
public O retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return (O) this;
@ -133,8 +133,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
public O serialConsistencyAll() {
this.serialConsistencyLevel = ConsistencyLevel.ALL;
return (O) this;
}
}
public O disableTracing() {
this.enableTracing = false;
return (O) this;
@ -155,85 +155,81 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
this.fetchSize[0] = fetchSize;
return (O) this;
}
protected Statement options(Statement statement) {
if (defaultTimestamp != null) {
statement.setDefaultTimestamp(defaultTimestamp[0]);
}
if (consistencyLevel != null) {
statement.setConsistencyLevel(consistencyLevel);
}
if (serialConsistencyLevel != null) {
statement.setSerialConsistencyLevel(serialConsistencyLevel);
}
if (retryPolicy != null) {
statement.setRetryPolicy(retryPolicy);
}
if (enableTracing) {
statement.enableTracing();
}
else {
} else {
statement.disableTracing();
}
if (fetchSize != null) {
statement.setFetchSize(fetchSize[0]);
}
return statement;
}
public Statement statement() {
return buildStatement();
}
public String cql() {
Statement statement = buildStatement();
Statement statement = buildStatement();
if (statement instanceof BuiltStatement) {
BuiltStatement buildStatement = (BuiltStatement) statement;
return buildStatement.setForceNoValues(true).getQueryString();
}
else {
} else {
return statement.toString();
}
}
public PreparedStatement prepareStatement() {
Statement statement = buildStatement();
if (statement instanceof RegularStatement) {
RegularStatement regularStatement = (RegularStatement) statement;
return sessionOps.prepare(regularStatement);
}
throw new CasserException("only RegularStatements can be prepared");
throw new HelenusException("only RegularStatements can be prepared");
}
public ListenableFuture<PreparedStatement> prepareStatementAsync() {
Statement statement = buildStatement();
if (statement instanceof RegularStatement) {
RegularStatement regularStatement = (RegularStatement) statement;
return sessionOps.prepareAsync(regularStatement);
}
throw new CasserException("only RegularStatements can be prepared");
throw new HelenusException("only RegularStatements can be prepared");
}
public Future<PreparedStatement> prepareStatementFuture() {
return Scala.asFuture(prepareStatementAsync());
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,62 +13,65 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.stream.Stream;
import scala.concurrent.Future;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.support.Fun;
import com.noorq.casser.support.Scala;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>> extends AbstractStatementOperation<E, O> {
import net.helenus.core.AbstractSessionOperations;
import net.helenus.support.Fun;
import net.helenus.support.Scala;
import scala.concurrent.Future;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
extends
AbstractStatementOperation<E, O> {
public AbstractStreamOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public abstract Stream<E> transform(ResultSet resultSet);
public PreparedStreamOperation<E> prepare() {
return new PreparedStreamOperation<E>(prepareStatement(), this);
}
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), new Function<PreparedStatement, PreparedStreamOperation<E>>() {
@Override
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
return new PreparedStreamOperation<E>(preparedStatement, _this);
}
});
return Futures.transform(prepareStatementAsync(),
new Function<PreparedStatement, PreparedStreamOperation<E>>() {
@Override
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
return new PreparedStreamOperation<E>(preparedStatement, _this);
}
});
}
public Future<PreparedStreamOperation<E>> prepareFuture() {
return Scala.asFuture(prepareAsync());
}
public Stream<E> sync() {
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
return transform(resultSet);
}
public ListenableFuture<Stream<E>> async() {
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<Stream<E>> future = Futures.transform(resultSetFuture, new Function<ResultSet, Stream<E>>() {
@ -79,27 +82,28 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
}
}, sessionOps.getExecutor());
return future;
}
public ListenableFuture<scala.collection.immutable.Stream<E>> asyncForScala() {
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<scala.collection.immutable.Stream<E>> future = Futures.transform(resultSetFuture, new Function<ResultSet, scala.collection.immutable.Stream<E>>() {
ListenableFuture<scala.collection.immutable.Stream<E>> future = Futures.transform(resultSetFuture,
new Function<ResultSet, scala.collection.immutable.Stream<E>>() {
@Override
public scala.collection.immutable.Stream<E> apply(ResultSet resultSet) {
Stream<E> stream = transform(resultSet);
return scala.collection.JavaConversions.asScalaIterator(stream.iterator()).toStream();
}
@Override
public scala.collection.immutable.Stream<E> apply(ResultSet resultSet) {
Stream<E> stream = transform(resultSet);
return scala.collection.JavaConversions.asScalaIterator(stream.iterator()).toStream();
}
}, sessionOps.getExecutor());
}, sessionOps.getExecutor());
return future;
}
public Future<scala.collection.immutable.Stream<E>> future() {
return Scala.asFuture(asyncForScala());
}
@ -116,8 +120,9 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
return Scala.asFuture(asyncForScala(), a, b, c);
}
public <A, B, C, D> Future<Fun.Tuple5<scala.collection.immutable.Stream<E>, A, B, C, D>> future(A a, B b, C c, D d) {
public <A, B, C, D> Future<Fun.Tuple5<scala.collection.immutable.Stream<E>, A, B, C, D>> future(A a, B b, C c,
D d) {
return Scala.asFuture(asyncForScala(), a, b, c, d);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet;
@ -23,21 +23,21 @@ public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation
private final BoundStatement boundStatement;
private final AbstractOperation<E, ?> delegate;
public BoundOperation(BoundStatement boundStatement, AbstractOperation<E, ?> operation) {
super(operation.sessionOps);
this.boundStatement = boundStatement;
this.delegate = operation;
}
@Override
public E transform(ResultSet resultSet) {
return delegate.transform(resultSet);
}
@Override
public Statement buildStatement() {
return boundStatement;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.Optional;
@ -25,13 +25,13 @@ public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E
private final BoundStatement boundStatement;
private final AbstractOptionalOperation<E, ?> delegate;
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
super(operation.sessionOps);
this.boundStatement = boundStatement;
this.delegate = operation;
}
@Override
public Optional<E> transform(ResultSet resultSet) {
return delegate.transform(resultSet);
@ -41,5 +41,5 @@ public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E
public Statement buildStatement() {
return boundStatement;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.stream.Stream;
@ -25,13 +25,13 @@ public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, Bo
private final BoundStatement boundStatement;
private final AbstractStreamOperation<E, ?> delegate;
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
super(operation.sessionOps);
this.boundStatement = boundStatement;
this.delegate = operation;
}
@Override
public Stream<E> transform(ResultSet resultSet) {
return delegate.transform(resultSet);
@ -41,5 +41,5 @@ public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, Bo
public Statement buildStatement() {
return boundStatement;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,68 +13,69 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusMappingException;
public final class CountOperation extends AbstractFilterOperation<Long, CountOperation> {
private CasserEntity entity;
private HelenusEntity entity;
public CountOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public CountOperation(AbstractSessionOperations sessionOperations, CasserEntity entity) {
public CountOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
super(sessionOperations);
this.entity = entity;
}
@Override
public BuiltStatement buildStatement() {
if (filters != null && !filters.isEmpty()) {
filters.forEach(f -> addPropertyNode(f.getNode()));
}
if (entity == null) {
throw new CasserMappingException("unknown entity");
throw new HelenusMappingException("unknown entity");
}
Select select = QueryBuilder.select().countAll().from(entity.getName().toCql());
if (filters != null && !filters.isEmpty()) {
Where where = select.where();
for (Filter<?> filter : filters) {
where.and(filter.getClause(sessionOps.getValuePreparer()));
}
}
return select;
}
@Override
public Long transform(ResultSet resultSet) {
return resultSet.one().getLong(0);
}
private void addPropertyNode(CasserPropertyNode p) {
private void addPropertyNode(HelenusPropertyNode p) {
if (entity == null) {
entity = p.getEntity();
}
else if (entity != p.getEntity()) {
throw new CasserMappingException("you can count columns only in single entity " + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
} else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can count columns only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,71 +13,71 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusMappingException;
public final class DeleteOperation extends AbstractFilterOperation<ResultSet, DeleteOperation> {
private CasserEntity entity;
private HelenusEntity entity;
private boolean ifExists = false;
private int[] ttl;
private long[] timestamp;
public DeleteOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public DeleteOperation(AbstractSessionOperations sessionOperations, CasserEntity entity) {
public DeleteOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
super(sessionOperations);
this.entity = entity;
}
@Override
public BuiltStatement buildStatement() {
if (filters != null && !filters.isEmpty()) {
filters.forEach(f -> addPropertyNode(f.getNode()));
}
if (entity == null) {
throw new CasserMappingException("unknown entity");
throw new HelenusMappingException("unknown entity");
}
if (filters != null && !filters.isEmpty()) {
Delete delete = QueryBuilder.delete().from(entity.getName().toCql());
if (this.ifExists) {
delete.ifExists();
}
Where where = delete.where();
for (Filter<?> filter : filters) {
where.and(filter.getClause(sessionOps.getValuePreparer()));
}
if (ifFilters != null && !ifFilters.isEmpty()) {
for (Filter<?> filter : ifFilters) {
delete.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
}
}
if (this.ttl != null) {
delete.using(QueryBuilder.ttl(this.ttl[0]));
}
@ -87,8 +87,7 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
return delete;
}
else {
} else {
return QueryBuilder.truncate(entity.getName().toCql());
}
}
@ -97,12 +96,12 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
public ResultSet transform(ResultSet resultSet) {
return resultSet;
}
public DeleteOperation ifExists() {
this.ifExists = true;
return this;
}
public DeleteOperation usingTtl(int ttl) {
this.ttl = new int[1];
this.ttl[0] = ttl;
@ -114,13 +113,13 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
this.timestamp[0] = timestamp;
return this;
}
private void addPropertyNode(CasserPropertyNode p) {
private void addPropertyNode(HelenusPropertyNode p) {
if (entity == null) {
entity = p.getEntity();
}
else if (entity != p.getEntity()) {
throw new CasserMappingException("you can delete rows only in single entity " + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
} else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can delete rows only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.ArrayList;
import java.util.List;
@ -24,107 +24,108 @@ import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.value.BeanColumnValueProvider;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.Fun;
import com.noorq.casser.support.Fun.Tuple2;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.value.BeanColumnValueProvider;
import net.helenus.support.Fun;
import net.helenus.support.HelenusMappingException;
public final class InsertOperation extends AbstractOperation<ResultSet, InsertOperation> {
private CasserEntity entity;
private final List<Fun.Tuple2<CasserPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<CasserPropertyNode, Object>>();
private HelenusEntity entity;
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private boolean ifNotExists;
private int[] ttl;
private long[] timestamp;
public InsertOperation(AbstractSessionOperations sessionOperations, boolean ifNotExists) {
super(sessionOperations);
this.ifNotExists = ifNotExists;
}
public InsertOperation(AbstractSessionOperations sessionOperations, CasserEntity entity, Object pojo, boolean ifNotExists) {
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, Object pojo,
boolean ifNotExists) {
super(sessionOperations);
this.ifNotExists = ifNotExists;
for (CasserProperty prop : entity.getOrderedProperties()) {
for (HelenusProperty prop : entity.getOrderedProperties()) {
Object value = BeanColumnValueProvider.INSTANCE.getColumnValue(pojo, -1, prop);
value = sessionOps.getValuePreparer().prepareColumnValue(value, prop);
if (value != null) {
CasserPropertyNode node = new CasserPropertyNode(prop, Optional.empty());
values.add(Tuple2.of(node, value));
HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
values.add(Fun.Tuple2.of(node, value));
}
}
}
public InsertOperation ifNotExists() {
this.ifNotExists = true;
return this;
}
public InsertOperation ifNotExists(boolean enable) {
this.ifNotExists = enable;
return this;
}
public <V> InsertOperation value(Getter<V> getter, V val) {
Objects.requireNonNull(getter, "getter is empty");
if (val != null) {
CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
Object value = sessionOps.getValuePreparer().prepareColumnValue(val, node.getProperty());
if (value != null) {
values.add(Tuple2.of(node, value));
values.add(Fun.Tuple2.of(node, value));
}
}
return this;
}
@Override
public BuiltStatement buildStatement() {
values.forEach(t -> addPropertyNode(t._1));
if (entity == null) {
throw new CasserMappingException("unknown entity");
throw new HelenusMappingException("unknown entity");
}
Insert insert = QueryBuilder.insertInto(entity.getName().toCql());
if (ifNotExists) {
insert.ifNotExists();
}
values.forEach(t -> {
insert.value(t._1.getColumnName(), t._2);
});
if (this.ttl != null) {
insert.using(QueryBuilder.ttl(this.ttl[0]));
}
if (this.timestamp != null) {
insert.using(QueryBuilder.timestamp(this.timestamp[0]));
}
return insert;
}
@ -144,13 +145,13 @@ public final class InsertOperation extends AbstractOperation<ResultSet, InsertOp
this.timestamp[0] = timestamp;
return this;
}
private void addPropertyNode(CasserPropertyNode p) {
private void addPropertyNode(HelenusPropertyNode p) {
if (entity == null) {
entity = p.getEntity();
}
else if (entity != p.getEntity()) {
throw new CasserMappingException("you can insert only single entity " + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
} else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface()
+ " or " + p.getEntity().getMappingInterface());
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
@ -22,7 +22,7 @@ public final class PreparedOperation<E> {
private final PreparedStatement preparedStatement;
private final AbstractOperation<E, ?> operation;
public PreparedOperation(PreparedStatement statement, AbstractOperation<E, ?> operation) {
this.preparedStatement = statement;
this.operation = operation;
@ -33,9 +33,9 @@ public final class PreparedOperation<E> {
}
public BoundOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundOperation<E>(boundStatement, operation);
}
@ -43,5 +43,5 @@ public final class PreparedOperation<E> {
public String toString() {
return preparedStatement.getQueryString();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
@ -22,27 +22,26 @@ public final class PreparedOptionalOperation<E> {
private final PreparedStatement preparedStatement;
private final AbstractOptionalOperation<E, ?> operation;
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
this.preparedStatement = statement;
this.operation = operation;
}
public PreparedStatement getPreparedStatement() {
return preparedStatement;
}
public BoundOptionalOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundOptionalOperation<E>(boundStatement, operation);
}
@Override
public String toString() {
return preparedStatement.getQueryString();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
@ -22,27 +22,26 @@ public final class PreparedStreamOperation<E> {
private final PreparedStatement preparedStatement;
private final AbstractStreamOperation<E, ?> operation;
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
this.preparedStatement = statement;
this.operation = operation;
}
public PreparedStatement getPreparedStatement() {
return preparedStatement;
}
public BoundStreamOperation<E> bind(Object... params) {
BoundStatement boundStatement = preparedStatement.bind(params);
return new BoundStreamOperation<E>(boundStatement, operation);
}
@Override
public String toString() {
return preparedStatement.getQueryString();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.Optional;
import java.util.function.Function;
@ -21,23 +21,22 @@ import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
private final SelectOperation<E> src;
public SelectFirstOperation(SelectOperation<E> src) {
super(src.sessionOps);
this.src = src;
this.filters = src.filters;
this.ifFilters = src.ifFilters;
}
public <R> SelectFirstTransformingOperation<R, E> map(Function<E, R> fn) {
return new SelectFirstTransformingOperation<R, E>(src, fn);
}
@Override
public BuiltStatement buildStatement() {
return src.buildStatement();
@ -47,6 +46,5 @@ public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperati
public Optional<E> transform(ResultSet resultSet) {
return src.transform(resultSet).findFirst();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.Optional;
import java.util.function.Function;
@ -21,21 +21,22 @@ import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
public final class SelectFirstTransformingOperation<R, E> extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
public final class SelectFirstTransformingOperation<R, E>
extends
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
private final SelectOperation<E> src;
private final Function<E, R> fn;
public SelectFirstTransformingOperation(SelectOperation<E> src, Function<E, R> fn) {
super(src.sessionOps);
this.src = src;
this.fn = fn;
this.filters = src.filters;
this.ifFilters = src.ifFilters;
}
@Override
public BuiltStatement buildStatement() {
return src.buildStatement();

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,16 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
@ -35,140 +28,129 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.Casser;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.Ordered;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.OrderingDirection;
import com.noorq.casser.mapping.value.ColumnValueProvider;
import com.noorq.casser.mapping.value.ValueProviderMap;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.Fun.ArrayTuple;
import net.helenus.core.*;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.MappingUtil;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.mapping.value.ValueProviderMap;
import net.helenus.support.Fun;
import net.helenus.support.HelenusMappingException;
public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, SelectOperation<E>> {
protected Function<Row, E> rowMapper = null;
protected final List<CasserPropertyNode> props = new ArrayList<CasserPropertyNode>();
protected final List<HelenusPropertyNode> props = new ArrayList<HelenusPropertyNode>();
protected List<Ordering> ordering = null;
protected Integer limit = null;
protected boolean allowFiltering = false;
public SelectOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
this.rowMapper = new Function<Row, E>() {
@Override
public E apply(Row source) {
ColumnValueProvider valueProvider = sessionOps.getValueProvider();
ColumnValueProvider valueProvider = sessionOps.getValueProvider();
Object[] arr = new Object[props.size()];
int i = 0;
for (CasserPropertyNode p : props) {
for (HelenusPropertyNode p : props) {
Object value = valueProvider.getColumnValue(source, -1, p.getProperty());
arr[i++] = value;
}
return (E) ArrayTuple.of(arr);
return (E) Fun.ArrayTuple.of(arr);
}
};
}
public SelectOperation(AbstractSessionOperations sessionOperations,
CasserEntity entity) {
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
super(sessionOperations);
entity.getOrderedProperties()
.stream()
.map(p -> new CasserPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
}
public SelectOperation(AbstractSessionOperations sessionOperations,
CasserEntity entity,
Function<Row, E> rowMapper) {
super(sessionOperations);
this.rowMapper = rowMapper;
entity.getOrderedProperties()
.stream()
.map(p -> new CasserPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
}
public SelectOperation(AbstractSessionOperations sessionOperations,
Function<Row, E> rowMapper,
CasserPropertyNode... props) {
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity,
Function<Row, E> rowMapper) {
super(sessionOperations);
this.rowMapper = rowMapper;
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p));
}
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper,
HelenusPropertyNode... props) {
super(sessionOperations);
this.rowMapper = rowMapper;
Collections.addAll(this.props, props);
}
public CountOperation count() {
CasserEntity entity = null;
for (CasserPropertyNode prop : props) {
HelenusEntity entity = null;
for (HelenusPropertyNode prop : props) {
if (entity == null) {
entity = prop.getEntity();
}
else if (entity != prop.getEntity()) {
throw new CasserMappingException("you can count records only from a single entity " + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException("you can count records only from a single entity "
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
}
}
return new CountOperation(sessionOps, entity);
}
public SelectFirstOperation<E> single() {
limit(1);
return new SelectFirstOperation<E>(this);
}
public <R> SelectTransformingOperation<R, E> mapTo(Class<R> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is null");
CasserEntity entity = Casser.entity(entityClass);
HelenusEntity entity = Helenus.entity(entityClass);
this.rowMapper = null;
return new SelectTransformingOperation<R, E>(this, (r) -> {
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
return (R) Casser.map(entityClass, map);
return (R) Helenus.map(entityClass, map);
});
}
public <R> SelectTransformingOperation<R, E> map(Function<E, R> fn) {
return new SelectTransformingOperation<R, E>(this, fn);
}
public SelectOperation<E> column(Getter<?> getter) {
CasserPropertyNode p = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
this.props.add(p);
return this;
}
public SelectOperation<E> orderBy(Getter<?> getter, OrderingDirection direction) {
getOrCreateOrdering().add(new Ordered(getter, direction).getOrdering());
return this;
}
public SelectOperation<E> orderBy(Ordered ordered) {
getOrCreateOrdering().add(ordered.getOrdering());
return this;
@ -178,89 +160,91 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
this.limit = limit;
return this;
}
public SelectOperation<E> allowFiltering() {
this.allowFiltering = true;
return this;
}
@Override
public BuiltStatement buildStatement() {
CasserEntity entity = null;
HelenusEntity entity = null;
Selection selection = QueryBuilder.select();
for (CasserPropertyNode prop : props) {
for (HelenusPropertyNode prop : props) {
selection = selection.column(prop.getColumnName());
if (prop.getProperty().caseSensitiveIndex()) {
allowFiltering = true;
}
if (entity == null) {
entity = prop.getEntity();
}
else if (entity != prop.getEntity()) {
throw new CasserMappingException("you can select columns only from a single entity " + entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
} else if (entity != prop.getEntity()) {
throw new HelenusMappingException("you can select columns only from a single entity "
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface());
}
}
if (entity == null) {
throw new CasserMappingException("no entity or table to select data");
throw new HelenusMappingException("no entity or table to select data");
}
Select select = selection.from(entity.getName().toCql());
if (ordering != null && !ordering.isEmpty()) {
select.orderBy(ordering.toArray(new Ordering[ordering.size()]));
}
if (limit != null) {
select.limit(limit.intValue());
select.limit(limit);
}
if (filters != null && !filters.isEmpty()) {
Where where = select.where();
for (Filter<?> filter : filters) {
where.and(filter.getClause(sessionOps.getValuePreparer()));
}
}
if (ifFilters != null && !ifFilters.isEmpty()) {
logger.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
}
if (allowFiltering) {
select.allowFiltering();
}
return select;
}
@SuppressWarnings("unchecked")
@Override
public Stream<E> transform(ResultSet resultSet) {
if (rowMapper != null) {
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED)
, false).map(rowMapper);
return StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
.map(rowMapper);
}
else {
return (Stream<E>) StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED)
, false);
return (Stream<E>) StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
}
}
private List<Ordering> getOrCreateOrdering() {
if (ordering == null) {
ordering = new ArrayList<Ordering>();
}
return ordering;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.function.Function;
import java.util.stream.Stream;
@ -21,21 +21,22 @@ import java.util.stream.Stream;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
public final class SelectTransformingOperation<R, E> extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
public final class SelectTransformingOperation<R, E>
extends
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
private final SelectOperation<E> src;
private final Function<E, R> fn;
public SelectTransformingOperation(SelectOperation<E> src, Function<E, R> fn) {
super(src.sessionOps);
this.src = src;
this.fn = fn;
this.filters = src.filters;
this.ifFilters = src.ifFilters;
}
@Override
public BuiltStatement buildStatement() {
return src.buildStatement();
@ -45,6 +46,5 @@ public final class SelectTransformingOperation<R, E> extends AbstractFilterStrea
public Stream<R> transform(ResultSet resultSet) {
return src.transform(resultSet).map(fn);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,14 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.operation;
package net.helenus.core.operation;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.*;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
@ -28,77 +23,75 @@ import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update;
import com.noorq.casser.core.AbstractSessionOperations;
import com.noorq.casser.core.CasserValidator;
import com.noorq.casser.core.Filter;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.Immutables;
import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter;
import net.helenus.core.Getter;
import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil;
import net.helenus.support.HelenusMappingException;
import net.helenus.support.Immutables;
public final class UpdateOperation extends AbstractFilterOperation<ResultSet, UpdateOperation> {
private CasserEntity entity = null;
private HelenusEntity entity = null;
private final List<Assignment> assignments = new ArrayList<Assignment>();
private int[] ttl;
private long[] timestamp;
public UpdateOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations);
}
public UpdateOperation(AbstractSessionOperations sessionOperations, CasserPropertyNode p, Object v) {
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
super(sessionOperations);
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
assignments.add(QueryBuilder.set(p.getColumnName(), value));
addPropertyNode(p);
}
public <V> UpdateOperation set(Getter<V> getter, V v) {
Objects.requireNonNull(getter, "getter is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(getter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(getter);
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
assignments.add(QueryBuilder.set(p.getColumnName(), value));
addPropertyNode(p);
return this;
}
/*
*
*
*
*
* COUNTER
*
*
*
*
*/
public <V> UpdateOperation increment(Getter<V> counterGetter) {
return increment(counterGetter, 1L);
}
public <V> UpdateOperation increment(Getter<V> counterGetter, long delta) {
Objects.requireNonNull(counterGetter, "counterGetter is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
assignments.add(QueryBuilder.incr(p.getColumnName(), delta));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation decrement(Getter<V> counterGetter) {
@ -106,145 +99,143 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
}
public <V> UpdateOperation decrement(Getter<V> counterGetter, long delta) {
Objects.requireNonNull(counterGetter, "counterGetter is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(counterGetter);
assignments.add(QueryBuilder.decr(p.getColumnName(), delta));
addPropertyNode(p);
return this;
}
/*
*
*
*
*
* LIST
*
*
*/
public <V> UpdateOperation prepend(Getter<List<V>> listGetter, V value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.prepend(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation prependAll(Getter<List<V>> listGetter, List<V> value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value);
assignments.add(QueryBuilder.prependAll(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation setIdx(Getter<List<V>> listGetter, int idx, V value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.setIdx(p.getColumnName(), idx, valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation append(Getter<List<V>> listGetter, V value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.append(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation appendAll(Getter<List<V>> listGetter, List<V> value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value);
assignments.add(QueryBuilder.appendAll(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation discard(Getter<List<V>> listGetter, V value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
Object valueObj = prepareSingleListValue(p, value);
assignments.add(QueryBuilder.discard(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation discardAll(Getter<List<V>> listGetter, List<V> value) {
Objects.requireNonNull(listGetter, "listGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(listGetter);
List valueObj = prepareListValue(p, value);
assignments.add(QueryBuilder.discardAll(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
private Object prepareSingleListValue(CasserPropertyNode p, Object value) {
CasserProperty prop = p.getProperty();
private Object prepareSingleListValue(HelenusPropertyNode p, Object value) {
HelenusProperty prop = p.getProperty();
Object valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
List convertedList = (List) converter.get().apply(Immutables.listOf(value));
valueObj = convertedList.get(0);
}
return valueObj;
}
private List prepareListValue(CasserPropertyNode p, List value) {
CasserProperty prop = p.getProperty();
private List prepareListValue(HelenusPropertyNode p, List value) {
HelenusProperty prop = p.getProperty();
List valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
valueObj = (List) converter.get().apply(value);
@ -252,157 +243,156 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
return valueObj;
}
/*
*
*
*
*
* SET
*
*
*
*
*/
public <V> UpdateOperation add(Getter<Set<V>> setGetter, V value) {
Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Object valueObj = prepareSingleSetValue(p, value);
assignments.add(QueryBuilder.add(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation addAll(Getter<Set<V>> setGetter, Set<V> value) {
Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Set valueObj = prepareSetValue(p, value);
assignments.add(QueryBuilder.addAll(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation remove(Getter<Set<V>> setGetter, V value) {
Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Object valueObj = prepareSingleSetValue(p, value);
assignments.add(QueryBuilder.remove(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
public <V> UpdateOperation removeAll(Getter<Set<V>> setGetter, Set<V> value) {
Objects.requireNonNull(setGetter, "setGetter is empty");
Objects.requireNonNull(value, "value is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(setGetter);
Set valueObj = prepareSetValue(p, value);
assignments.add(QueryBuilder.removeAll(p.getColumnName(), valueObj));
addPropertyNode(p);
return this;
}
private Object prepareSingleSetValue(CasserPropertyNode p, Object value) {
CasserProperty prop = p.getProperty();
private Object prepareSingleSetValue(HelenusPropertyNode p, Object value) {
HelenusProperty prop = p.getProperty();
Object valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
valueObj = convertedSet.iterator().next();
}
return valueObj;
}
private Set prepareSetValue(CasserPropertyNode p, Set value) {
CasserProperty prop = p.getProperty();
private Set prepareSetValue(HelenusPropertyNode p, Set value) {
HelenusProperty prop = p.getProperty();
Set valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
valueObj = (Set) converter.get().apply(value);
}
return valueObj;
}
/*
*
*
*
*
* MAP
*
*
*
*
*/
public <K,V> UpdateOperation put(Getter<Map<K, V>> mapGetter, K key, V value) {
public <K, V> UpdateOperation put(Getter<Map<K, V>> mapGetter, K key, V value) {
Objects.requireNonNull(mapGetter, "mapGetter is empty");
Objects.requireNonNull(key, "key is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
CasserProperty prop = p.getProperty();
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get()
.apply(Immutables.mapOf(key, value));
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
}
}
else {
} else {
assignments.add(QueryBuilder.put(p.getColumnName(), key, value));
}
addPropertyNode(p);
return this;
}
public <K,V> UpdateOperation putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {
}
public <K, V> UpdateOperation putAll(Getter<Map<K, V>> mapGetter, Map<K, V> map) {
Objects.requireNonNull(mapGetter, "mapGetter is empty");
Objects.requireNonNull(map, "map is empty");
CasserPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
CasserProperty prop = p.getProperty();
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) {
Map convertedMap = (Map) converter.get().apply(map);
assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
}
else {
} else {
assignments.add(QueryBuilder.putAll(p.getColumnName(), map));
}
addPropertyNode(p);
return this;
}
}
@Override
public BuiltStatement buildStatement() {
if (entity == null) {
throw new CasserMappingException("empty update operation");
throw new HelenusMappingException("empty update operation");
}
Update update = QueryBuilder.update(entity.getName().toCql());
for (Assignment assignment : assignments) {
@ -410,26 +400,26 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
}
if (filters != null && !filters.isEmpty()) {
for (Filter<?> filter : filters) {
update.where(filter.getClause(sessionOps.getValuePreparer()));
}
}
if (ifFilters != null && !ifFilters.isEmpty()) {
for (Filter<?> filter : ifFilters) {
update.onlyIf(filter.getClause(sessionOps.getValuePreparer()));
}
}
if (this.ttl != null) {
update.using(QueryBuilder.ttl(this.ttl[0]));
}
if (this.timestamp != null) {
update.using(QueryBuilder.timestamp(this.timestamp[0]));
}
return update;
}
@ -437,7 +427,7 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
public ResultSet transform(ResultSet resultSet) {
return resultSet;
}
public UpdateOperation usingTtl(int ttl) {
this.ttl = new int[1];
this.ttl[0] = ttl;
@ -449,13 +439,13 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
this.timestamp[0] = timestamp;
return this;
}
private void addPropertyNode(CasserPropertyNode p) {
private void addPropertyNode(HelenusPropertyNode p) {
if (entity == null) {
entity = p.getEntity();
}
else if (entity != p.getEntity()) {
throw new CasserMappingException("you can update columns only in single entity " + entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
} else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can update columns only in single entity "
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface());
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,33 +13,27 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.HashMap;
import java.util.Map;
public enum DefaultPrimitiveTypes {
BOOLEAN(boolean.class, false),
BYTE(byte.class, (byte)0x0),
CHAR(char.class, (char)0x0),
SHORT(short.class, (short)0),
INT(int.class, 0),
LONG(long.class, 0L),
FLOAT(float.class, 0.0f),
DOUBLE(double.class, 0.0);
BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class,
(short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0);
private final Class<?> primitiveClass;
private final Object defaultValue;
private final static Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>();
static {
for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
map.put(type.getPrimitiveClass(), type);
}
}
private DefaultPrimitiveTypes(Class<?> primitiveClass, Object defaultValue) {
this.primitiveClass = primitiveClass;
this.defaultValue = defaultValue;
@ -48,7 +42,7 @@ public enum DefaultPrimitiveTypes {
public static DefaultPrimitiveTypes lookup(Class<?> primitiveClass) {
return map.get(primitiveClass);
}
public Class<?> getPrimitiveClass() {
return primitiveClass;
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,18 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
import com.noorq.casser.mapping.CasserEntity;
package net.helenus.core.reflect;
import net.helenus.mapping.HelenusEntity;
public interface DslExportable {
public static final String GET_ENTITY_METHOD = "getCasserMappingEntity";
public static final String GET_PARENT_METHOD = "getParentDslCasserPropertyNode";
CasserEntity getCasserMappingEntity();
CasserPropertyNode getParentDslCasserPropertyNode();
public static final String GET_ENTITY_METHOD = "getHelenusMappingEntity";
public static final String GET_PARENT_METHOD = "getParentDslHelenusPropertyNode";
HelenusEntity getHelenusMappingEntity();
HelenusPropertyNode getParentDslHelenusPropertyNode();
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,8 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
@ -22,72 +24,69 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.TupleType;
import com.datastax.driver.core.TupleValue;
import com.datastax.driver.core.UDTValue;
import com.noorq.casser.core.Casser;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserMappingEntity;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.type.AbstractDataType;
import com.noorq.casser.mapping.type.DTDataType;
import com.noorq.casser.mapping.type.UDTDataType;
import com.noorq.casser.support.CasserException;
import com.noorq.casser.support.DslPropertyException;
import com.datastax.driver.core.*;
import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.type.AbstractDataType;
import net.helenus.mapping.type.DTDataType;
import net.helenus.mapping.type.UDTDataType;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusException;
public class DslInvocationHandler<E> implements InvocationHandler {
private final CasserEntity entity;
private final Optional<CasserPropertyNode> parent;
private final Map<Method, CasserProperty> map = new HashMap<Method, CasserProperty>();
private final HelenusEntity entity;
private final Optional<HelenusPropertyNode> parent;
private final Map<Method, HelenusProperty> map = new HashMap<Method, HelenusProperty>();
private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent) {
this.entity = new CasserMappingEntity(iface);
public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata) {
this.entity = new HelenusMappingEntity(iface, metadata);
this.parent = parent;
for (CasserProperty prop : entity.getOrderedProperties()) {
for (HelenusProperty prop : entity.getOrderedProperties()) {
map.put(prop.getGetterMethod(), prop);
AbstractDataType type = prop.getDataType();
Class<?> javaType = prop.getJavaType();
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
Object childDsl = Casser.dsl(javaType, classLoader,
Optional.of(new CasserPropertyNode(prop, parent)));
Object childDsl = Helenus.dsl(javaType, classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
udtMap.put(prop.getGetterMethod(), childDsl);
}
if (type instanceof DTDataType) {
DTDataType dataType = (DTDataType) type;
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) {
Object childDsl = Casser.dsl(javaType, classLoader,
Optional.of(new CasserPropertyNode(prop, parent)));
Object childDsl = Helenus.dsl(javaType, classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)), metadata);
tupleMap.put(prop.getGetterMethod(), childDsl);
}
}
}
}
@Override
public Object invoke(Object proxy, Method method, Object[] args)
throws Throwable {
String methodName = method.getName();
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
if ("equals".equals(methodName) && method.getParameterCount() == 1) {
Object otherObj = args[0];
if (otherObj == null) {
@ -98,80 +97,83 @@ public class DslInvocationHandler<E> implements InvocationHandler {
}
return false;
}
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
throw new CasserException("invalid getter method " + method);
throw new HelenusException("invalid getter method " + method);
}
if ("hashCode".equals(methodName)) {
return hashCode();
}
if ("toString".equals(methodName)) {
return entity.toString();
}
if (DslExportable.GET_ENTITY_METHOD.equals(methodName)) {
return entity;
}
if (DslExportable.GET_PARENT_METHOD.equals(methodName)) {
return parent.get();
}
CasserProperty prop = map.get(method);
HelenusProperty prop = map.get(method);
if (prop == null) {
prop = entity.getProperty(methodName);
}
if (prop != null) {
AbstractDataType type = prop.getDataType();
if (type instanceof UDTDataType) {
Object childDsl = udtMap.get(method);
if (childDsl != null) {
return childDsl;
}
}
if (type instanceof DTDataType) {
DTDataType dataType = (DTDataType) type;
DataType dt = dataType.getDataType();
switch(dt.getName()) {
case TUPLE:
Object childDsl = tupleMap.get(method);
if (childDsl != null) {
return childDsl;
}
break;
case SET:
return new SetDsl(new CasserPropertyNode(prop, parent));
case LIST:
return new ListDsl(new CasserPropertyNode(prop, parent));
case MAP:
return new MapDsl(new CasserPropertyNode(prop, parent));
switch (dt.getName()) {
case TUPLE :
Object childDsl = tupleMap.get(method);
if (childDsl != null) {
return childDsl;
}
break;
case SET :
return new SetDsl(new HelenusPropertyNode(prop, parent));
case LIST :
return new ListDsl(new HelenusPropertyNode(prop, parent));
case MAP :
return new MapDsl(new HelenusPropertyNode(prop, parent));
default :
break;
default:
break;
}
}
throw new DslPropertyException(new CasserPropertyNode(prop, parent));
throw new DslPropertyException(new HelenusPropertyNode(prop, parent));
}
throw new CasserException("invalid method call " + method);
throw new HelenusException("invalid method call " + method);
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
@ -22,27 +22,22 @@ import java.util.function.Function;
import javax.validation.ConstraintValidator;
import com.noorq.casser.core.SessionRepository;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.mapping.ColumnType;
import com.noorq.casser.mapping.IdentityName;
import com.noorq.casser.mapping.MappingUtil;
import com.noorq.casser.mapping.OrderingDirection;
import com.noorq.casser.mapping.type.AbstractDataType;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType;
import net.helenus.support.HelenusMappingException;
public final class CasserNamedProperty implements CasserProperty {
public final class HelenusNamedProperty implements HelenusProperty {
private final String name;
public CasserNamedProperty(String name) {
public HelenusNamedProperty(String name) {
this.name = name;
}
@Override
public CasserEntity getEntity() {
throw new CasserMappingException("will never called");
public HelenusEntity getEntity() {
throw new HelenusMappingException("will never called");
}
@Override
@ -52,7 +47,7 @@ public final class CasserNamedProperty implements CasserProperty {
@Override
public Method getGetterMethod() {
throw new CasserMappingException("will never called");
throw new HelenusMappingException("will never called");
}
@Override
@ -65,14 +60,17 @@ public final class CasserNamedProperty implements CasserProperty {
return Optional.empty();
}
@Override
public boolean caseSensitiveIndex() { return false; }
@Override
public Class<?> getJavaType() {
throw new CasserMappingException("will never called");
throw new HelenusMappingException("will never called");
}
@Override
public AbstractDataType getDataType() {
throw new CasserMappingException("will never called");
throw new HelenusMappingException("will never called");
}
@Override
@ -91,17 +89,15 @@ public final class CasserNamedProperty implements CasserProperty {
}
@Override
public Optional<Function<Object, Object>> getReadConverter(
SessionRepository repository) {
public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
return Optional.empty();
}
@Override
public Optional<Function<Object, Object>> getWriteConverter(
SessionRepository repository) {
public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
return Optional.empty();
}
@Override
public ConstraintValidator<? extends Annotation, ?>[] getValidators() {
return MappingUtil.EMPTY_VALIDATORS;

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,24 +13,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.*;
import java.util.stream.Collectors;
import com.noorq.casser.mapping.CasserEntity;
import com.noorq.casser.mapping.CasserProperty;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty;
public final class CasserPropertyNode implements Iterable<CasserProperty> {
public final class HelenusPropertyNode implements Iterable<HelenusProperty> {
private final CasserProperty prop;
private final Optional<CasserPropertyNode> next;
public CasserPropertyNode(CasserProperty prop, Optional<CasserPropertyNode> next) {
private final HelenusProperty prop;
private final Optional<HelenusPropertyNode> next;
public HelenusPropertyNode(HelenusProperty prop, Optional<HelenusPropertyNode> next) {
this.prop = prop;
this.next = next;
}
@ -39,77 +35,74 @@ public final class CasserPropertyNode implements Iterable<CasserProperty> {
if (next.isPresent()) {
List<String> columnNames = new ArrayList<String>();
for (CasserProperty p : this) {
for (HelenusProperty p : this) {
columnNames.add(p.getColumnName().toCql(true));
}
Collections.reverse(columnNames);
if (prop instanceof CasserNamedProperty) {
if (prop instanceof HelenusNamedProperty) {
int size = columnNames.size();
StringBuilder str = new StringBuilder();
for (int i = 0; i != size -1; ++i) {
for (int i = 0; i != size - 1; ++i) {
if (str.length() != 0) {
str.append(".");
}
str.append(columnNames.get(i));
}
str.append("[").append(columnNames.get(size-1)).append("]");
str.append("[").append(columnNames.get(size - 1)).append("]");
return str.toString();
}
else {
} else {
return columnNames.stream().collect(Collectors.joining("."));
}
}
else {
} else {
return prop.getColumnName().toCql();
}
}
public CasserEntity getEntity() {
public HelenusEntity getEntity() {
if (next.isPresent()) {
CasserProperty last = prop;
for (CasserProperty p : this) {
HelenusProperty last = prop;
for (HelenusProperty p : this) {
last = p;
}
return last.getEntity();
}
else {
} else {
return prop.getEntity();
}
}
public CasserProperty getProperty() {
public HelenusProperty getProperty() {
return prop;
}
public Optional<CasserPropertyNode> getNext() {
public Optional<HelenusPropertyNode> getNext() {
return next;
}
public Iterator<CasserProperty> iterator() {
public Iterator<HelenusProperty> iterator() {
return new PropertyNodeIterator(Optional.of(this));
}
private static class PropertyNodeIterator implements Iterator<CasserProperty> {
private static class PropertyNodeIterator implements Iterator<HelenusProperty> {
private Optional<CasserPropertyNode> next;
public PropertyNodeIterator(Optional<CasserPropertyNode> next) {
private Optional<HelenusPropertyNode> next;
public PropertyNodeIterator(Optional<HelenusPropertyNode> next) {
this.next = next;
}
@Override
public boolean hasNext() {
return next.isPresent();
}
@Override
public CasserProperty next() {
CasserPropertyNode node = next.get();
public HelenusProperty next() {
HelenusPropertyNode node = next.get();
next = node.next;
return node.prop;
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,34 +13,30 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Optional;
import java.util.*;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.DslPropertyException;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;
public final class ListDsl<V> implements List<V> {
private final CasserPropertyNode parent;
public ListDsl(CasserPropertyNode parent) {
private final HelenusPropertyNode parent;
public ListDsl(HelenusPropertyNode parent) {
this.parent = parent;
}
public CasserPropertyNode getParent() {
public HelenusPropertyNode getParent() {
return parent;
}
@Override
public V get(int index) {
CasserProperty prop = new CasserNamedProperty(Integer.toString(index));
throw new DslPropertyException(new CasserPropertyNode(prop, Optional.of(parent)));
HelenusProperty prop = new HelenusNamedProperty(Integer.toString(index));
throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
}
@Override
@ -172,9 +168,9 @@ public final class ListDsl<V> implements List<V> {
throwShouldNeverCall();
return null;
}
private void throwShouldNeverCall() {
throw new CasserMappingException("should be never called");
throw new HelenusMappingException("should be never called");
}
@Override

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,35 +13,35 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import com.noorq.casser.mapping.CasserProperty;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.DslPropertyException;
import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;
public final class MapDsl<K, V> implements Map<K, V> {
private final CasserPropertyNode parent;
public MapDsl(CasserPropertyNode parent) {
private final HelenusPropertyNode parent;
public MapDsl(HelenusPropertyNode parent) {
this.parent = parent;
}
public CasserPropertyNode getParent() {
public HelenusPropertyNode getParent() {
return parent;
}
@Override
public V get(Object key) {
CasserProperty prop = new CasserNamedProperty(key.toString());
throw new DslPropertyException(new CasserPropertyNode(prop, Optional.of(parent)));
HelenusProperty prop = new HelenusNamedProperty(key.toString());
throw new DslPropertyException(new HelenusPropertyNode(prop, Optional.of(parent)));
}
@Override
public int size() {
throwShouldNeverCall();
@ -107,12 +107,12 @@ public final class MapDsl<K, V> implements Map<K, V> {
}
private void throwShouldNeverCall() {
throw new CasserMappingException("should be never called");
throw new HelenusMappingException("should be never called");
}
@Override
public String toString() {
return "MapDsl";
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,14 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.Map;
public interface MapExportable {
public static final String TO_MAP_METHOD = "toMap";
Map<String, Object> toMap();
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,29 +13,49 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.Map;
import com.noorq.casser.support.CasserException;
import net.helenus.support.HelenusException;
public class MapperInvocationHandler<E> implements InvocationHandler {
private final Map<String, Object> src;
private final Class<E> iface;
public MapperInvocationHandler(Class<E> iface, Map<String, Object> src) {
this.src = src;
this.iface = iface;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args)
throws Throwable {
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
if (method.isDefault()) {
// NOTE: This is reflection magic to invoke (non-recursively) a default method implemented on an interface
// that we've proxied (in ReflectionDslInstantiator). I found the answer in this article.
// https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/
// First, we need an instance of a private inner-class found in MethodHandles.
Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
constructor.setAccessible(true);
// Now we need to lookup and invoke special the default method on the interface class.
final Class<?> declaringClass = method.getDeclaringClass();
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
.unreflectSpecial(method, declaringClass)
.bindTo(proxy)
.invokeWithArguments(args);
return result;
}
String methodName = method.getName();
@ -49,9 +69,9 @@ public class MapperInvocationHandler<E> implements InvocationHandler {
}
return false;
}
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
throw new CasserException("invalid getter method " + method);
throw new HelenusException("invalid getter method " + method);
}
if ("hashCode".equals(methodName)) {
@ -67,25 +87,25 @@ public class MapperInvocationHandler<E> implements InvocationHandler {
}
Object value = src.get(methodName);
if (value == null) {
Class<?> returnType = method.getReturnType();
Class<?> returnType = method.getReturnType();
if (returnType.isPrimitive()) {
DefaultPrimitiveTypes type = DefaultPrimitiveTypes.lookup(returnType);
if (type == null) {
throw new CasserException("unknown primitive type " + returnType);
throw new HelenusException("unknown primitive type " + returnType);
}
return type.getDefaultValue();
}
}
return value;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,28 +13,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.lang.reflect.Proxy;
import java.util.Optional;
import com.noorq.casser.core.DslInstantiator;
import com.datastax.driver.core.Metadata;
import net.helenus.core.DslInstantiator;
public enum ReflectionDslInstantiator implements DslInstantiator {
INSTANCE;
@Override
@SuppressWarnings("unchecked")
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent) {
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent);
E proxy = (E) Proxy.newProxyInstance(
classLoader,
new Class[] { iface, DslExportable.class },
handler);
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata) {
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler);
return proxy;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.support.HelenusMappingException;
public final class ReflectionInstantiator {
@ -23,13 +23,13 @@ public final class ReflectionInstantiator {
}
public static <T> T instantiateClass(Class<T> clazz) {
try {
return clazz.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
throw new CasserMappingException("invalid class " + clazz, e);
throw new HelenusMappingException("invalid class " + clazz, e);
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,12 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.lang.reflect.Proxy;
import java.util.Map;
import com.noorq.casser.core.MapperInstantiator;
import net.helenus.core.MapperInstantiator;
public enum ReflectionMapperInstantiator implements MapperInstantiator {
@ -26,18 +26,12 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
@Override
@SuppressWarnings("unchecked")
public <E> E instantiate(Class<E> iface, Map<String, Object> src,
ClassLoader classLoader) {
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
E proxy = (E) Proxy.newProxyInstance(
classLoader,
new Class[] { iface, MapExportable.class },
handler);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler);
return proxy;
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,26 +13,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.core.reflect;
package net.helenus.core.reflect;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.support.HelenusMappingException;
public final class SetDsl<V> implements Set<V> {
private final CasserPropertyNode parent;
public SetDsl(CasserPropertyNode parent) {
private final HelenusPropertyNode parent;
public SetDsl(HelenusPropertyNode parent) {
this.parent = parent;
}
public CasserPropertyNode getParent() {
public HelenusPropertyNode getParent() {
return parent;
}
@Override
public int size() {
throwShouldNeverCall();
@ -109,9 +109,9 @@ public final class SetDsl<V> implements Set<V> {
public void clear() {
throwShouldNeverCall();
}
private void throwShouldNeverCall() {
throw new CasserMappingException("should be never called");
throw new HelenusMappingException("should be never called");
}
@Override

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,15 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.lang.reflect.Method;
import com.noorq.casser.mapping.annotation.ClusteringColumn;
import com.noorq.casser.mapping.annotation.Column;
import com.noorq.casser.mapping.annotation.PartitionKey;
import com.noorq.casser.mapping.annotation.StaticColumn;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.StaticColumn;
import net.helenus.support.HelenusMappingException;
public final class ColumnInformation {
@ -29,7 +29,7 @@ public final class ColumnInformation {
private final ColumnType columnType;
private final int ordinal;
private final OrderingDirection ordering;
public ColumnInformation(Method getter) {
String columnName = null;
@ -37,18 +37,16 @@ public final class ColumnInformation {
ColumnType columnTypeLocal = ColumnType.COLUMN;
int ordinalLocal = 0;
OrderingDirection orderingLocal = OrderingDirection.ASC;
PartitionKey partitionKey = getter.getDeclaredAnnotation(PartitionKey.class);
if (partitionKey != null) {
columnName = partitionKey.value();
forceQuote = partitionKey.forceQuote();
columnTypeLocal = ColumnType.PARTITION_KEY;
ordinalLocal = partitionKey.ordinal();
}
ClusteringColumn clusteringColumn = getter.getDeclaredAnnotation(ClusteringColumn.class);
if (clusteringColumn != null) {
ensureSingleColumnType(columnTypeLocal, getter);
columnName = clusteringColumn.value();
@ -66,7 +64,7 @@ public final class ColumnInformation {
columnTypeLocal = ColumnType.STATIC_COLUMN;
ordinalLocal = staticColumn.ordinal();
}
Column column = getter.getDeclaredAnnotation(Column.class);
if (column != null) {
ensureSingleColumnType(columnTypeLocal, getter);
@ -75,17 +73,17 @@ public final class ColumnInformation {
columnTypeLocal = ColumnType.COLUMN;
ordinalLocal = column.ordinal();
}
if (columnName == null || columnName.isEmpty()) {
columnName = MappingUtil.getDefaultColumnName(getter);
}
this.columnName = new IdentityName(columnName, forceQuote);
this.columnType = columnTypeLocal;
this.ordinal = ordinalLocal;
this.ordering = orderingLocal;
}
public IdentityName getColumnName() {
return columnName;
}
@ -101,21 +99,19 @@ public final class ColumnInformation {
public OrderingDirection getOrdering() {
return ordering;
}
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
if (columnTypeLocal != ColumnType.COLUMN) {
throw new CasserMappingException("property can be annotated only by a single column type " + getter);
throw new HelenusMappingException("property can be annotated only by a single column type " + getter);
}
}
@Override
public String toString() {
return "ColumnInformation [columnName=" + columnName + ", columnType="
+ columnType + ", ordinal=" + ordinal + ", ordering="
+ ordering + "]";
return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal
+ ", ordering=" + ordering + "]";
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
public enum ColumnType {
PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN;

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,20 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.util.Collection;
public interface CasserEntity {
public interface HelenusEntity {
HelenusEntityType getType();
boolean isCacheable();
CasserEntityType getType();
Class<?> getMappingInterface();
IdentityName getName();
Collection<CasserProperty> getOrderedProperties();
CasserProperty getProperty(String name);
Collection<HelenusProperty> getOrderedProperties();
HelenusProperty getProperty(String name);
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,8 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
public enum CasserEntityType {
/**
 * The kind of persistent entity a mapping interface represents: a Cassandra
 * table, a tuple type, or a user-defined type (UDT).
 */
public enum HelenusEntityType {
	TABLE, TUPLE, UDT;
}

View file

@ -0,0 +1,270 @@
/*
*
* Copyright (C) 2015 The Helenus Authors
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping;
import java.lang.reflect.Method;
import java.util.*;
import com.datastax.driver.core.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.reflect.TypeToken;
import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable;
import net.helenus.mapping.annotation.*;
import net.helenus.support.HelenusMappingException;
/**
 * Runtime mapping model for a single entity interface (a @Table, @Tuple or
 * @UDT annotated interface). Discovers getter methods, builds the property
 * map, and validates partition-key / clustering-column / tuple ordinals.
 */
public final class HelenusMappingEntity implements HelenusEntity {

	private final Class<?> iface;
	private final HelenusEntityType type;
	private final IdentityName name;
	private final boolean cacheable;
	// getter-method name -> Method, for every getter declared on the interface
	// and its directly extended interfaces
	private final ImmutableMap<String, Method> methods;
	// property name -> mapped property; only populated when metadata was supplied
	private final ImmutableMap<String, HelenusProperty> props;
	// properties sorted by column type, then by declared ordinal
	private final ImmutableList<HelenusProperty> orderedProps;

	public HelenusMappingEntity(Class<?> iface, Metadata metadata) {
		this(iface, autoDetectType(iface), metadata);
	}

	public HelenusMappingEntity(Class<?> iface, HelenusEntityType type, Metadata metadata) {

		if (iface == null || !iface.isInterface()) {
			throw new IllegalArgumentException("invalid parameter " + iface);
		}

		this.iface = iface;
		this.type = Objects.requireNonNull(type, "type is empty");
		this.name = resolveName(iface, type);

		HelenusSettings settings = Helenus.settings();

		// Collect candidate methods from the interface itself and from all
		// directly extended interfaces. (Renamed from "methods" so the local
		// does not shadow the field of the same name.)
		List<Method> allMethods = new ArrayList<Method>();
		allMethods.addAll(Arrays.asList(iface.getDeclaredMethods()));
		for (Class<?> c : iface.getInterfaces()) {
			allMethods.addAll(Arrays.asList(c.getDeclaredMethods()));
		}

		List<HelenusProperty> propsLocal = new ArrayList<HelenusProperty>();
		ImmutableMap.Builder<String, HelenusProperty> propsBuilder = ImmutableMap.builder();
		ImmutableMap.Builder<String, Method> methodsBuilder = ImmutableMap.builder();

		for (Method method : allMethods) {

			if (settings.getGetterMethodDetector().apply(method)) {

				methodsBuilder.put(method.getName(), method);

				// Properties can only be resolved against cluster metadata;
				// without it we still remember the getter (see getProperty()).
				if (metadata != null) {
					HelenusProperty prop = new HelenusMappingProperty(this, method, metadata);

					propsBuilder.put(prop.getPropertyName(), prop);
					propsLocal.add(prop);
				}
			}
		}

		this.methods = methodsBuilder.build();
		this.props = propsBuilder.build();

		// Order matters for primary-key layout: column type first, then ordinal.
		propsLocal.sort(TypeAndOrdinalColumnComparator.INSTANCE);
		this.orderedProps = ImmutableList.copyOf(propsLocal);

		validateOrdinals();

		cacheable = (null != iface.getDeclaredAnnotation(Cacheable.class));
	}

	@Override
	public HelenusEntityType getType() {
		return type;
	}

	@Override
	public boolean isCacheable() {
		return cacheable;
	}

	@Override
	public Class<?> getMappingInterface() {
		return iface;
	}

	@Override
	public Collection<HelenusProperty> getOrderedProperties() {
		return orderedProps;
	}

	/**
	 * Returns the mapped property for the given name. If the name matches a
	 * known getter that was not mapped at construction time (no metadata was
	 * available then), a property is created lazily against default metadata.
	 */
	@Override
	public HelenusProperty getProperty(String name) {
		HelenusProperty property = props.get(name);
		if (property == null && methods.containsKey(name)) {
			property = new HelenusMappingProperty(this, methods.get(name), new DefaultMetadata());
			// TODO(gburd): review adding these into the props map...
		}
		return property;
	}

	@Override
	public IdentityName getName() {
		return name;
	}

	// Derives the CQL identity name from the entity type's naming annotation.
	private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {

		switch (type) {

			case TABLE :
				return MappingUtil.getTableName(iface, true);

			case TUPLE :
				return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);

			case UDT :
				return MappingUtil.getUserDefinedTypeName(iface, true);
		}

		// Fixed message: previously repeated the type where the interface was meant.
		throw new HelenusMappingException("invalid entity type " + type + " in " + iface);
	}

	// Determines the entity type from the @Table / @Tuple / @UDT annotation.
	private static HelenusEntityType autoDetectType(Class<?> iface) {

		Objects.requireNonNull(iface, "empty iface");

		if (null != iface.getDeclaredAnnotation(Table.class)) {
			return HelenusEntityType.TABLE;
		}

		else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
			return HelenusEntityType.TUPLE;
		}

		else if (null != iface.getDeclaredAnnotation(UDT.class)) {
			return HelenusEntityType.UDT;
		}

		throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
	}

	// Dispatches ordinal validation by entity type; UDTs need no ordinal check.
	private void validateOrdinals() {

		switch (getType()) {

			case TABLE :
				validateOrdinalsForTable();
				break;

			case TUPLE :
				validateOrdinalsInTuple();
				break;

			default :
				break;
		}
	}

	// Ensures no two partition keys and no two clustering columns share an ordinal.
	private void validateOrdinalsForTable() {

		BitSet partitionKeys = new BitSet();
		BitSet clusteringColumns = new BitSet();

		for (HelenusProperty prop : getOrderedProperties()) {

			ColumnType type = prop.getColumnType();

			int ordinal = prop.getOrdinal();

			switch (type) {

				case PARTITION_KEY :
					if (partitionKeys.get(ordinal)) {
						throw new HelenusMappingException(
								"detected two or more partition key columns with the same ordinal " + ordinal + " in "
										+ prop.getEntity());
					}
					partitionKeys.set(ordinal);
					break;

				case CLUSTERING_COLUMN :
					if (clusteringColumns.get(ordinal)) {
						// Fixed message: was "two or clustering columns".
						throw new HelenusMappingException("detected two or more clustering columns with the same ordinal "
								+ ordinal + " in " + prop.getEntity());
					}
					clusteringColumns.set(ordinal);
					break;

				default :
					break;
			}
		}
	}

	// Tuple ordinals must form an exact, gapless 0..n-1 sequence.
	private void validateOrdinalsInTuple() {
		boolean[] ordinals = new boolean[props.size()];

		getOrderedProperties().forEach(p -> {

			int ordinal = p.getOrdinal();

			if (ordinal < 0 || ordinal >= ordinals.length) {
				throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property "
						+ p.getPropertyName() + " in " + p.getEntity());
			}

			if (ordinals[ordinal]) {
				throw new HelenusMappingException(
						"detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
			}

			ordinals[ordinal] = true;

		});

		for (int i = 0; i != ordinals.length; ++i) {
			if (!ordinals[i]) {
				throw new HelenusMappingException("detected absent ordinal " + i + " in " + this);
			}
		}
	}

	@Override
	public String toString() {

		StringBuilder str = new StringBuilder();
		str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ")
				.append(type.name().toLowerCase()).append(":\n");

		for (HelenusProperty prop : getOrderedProperties()) {
			str.append(prop.toString());
			str.append("\n");
		}
		return str.toString();
	}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
@ -23,51 +23,55 @@ import java.util.function.Function;
import javax.validation.ConstraintValidator;
import com.noorq.casser.core.SessionRepository;
import com.noorq.casser.mapping.javatype.AbstractJavaType;
import com.noorq.casser.mapping.javatype.MappingJavaTypes;
import com.noorq.casser.mapping.type.AbstractDataType;
import com.datastax.driver.core.Metadata;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes;
import net.helenus.mapping.type.AbstractDataType;
public final class CasserMappingProperty implements CasserProperty {
public final class HelenusMappingProperty implements HelenusProperty {
private final CasserEntity entity;
private final HelenusEntity entity;
private final Method getter;
private final String propertyName;
private final Optional<IdentityName> indexName;
private final boolean caseSensitiveIndex;
private final ColumnInformation columnInfo;
private final Type genericJavaType;
private final Class<?> javaType;
private final AbstractJavaType abstractJavaType;
private final AbstractDataType dataType;
private volatile Optional<Function<Object, Object>> readConverter = null;
private volatile Optional<Function<Object, Object>> writeConverter = null;
private final ConstraintValidator<? extends Annotation, ?>[] validators;
public CasserMappingProperty(CasserMappingEntity entity, Method getter) {
public HelenusMappingProperty(HelenusMappingEntity entity, Method getter, Metadata metadata) {
this.entity = entity;
this.getter = getter;
this.propertyName = MappingUtil.getPropertyName(getter);
this.indexName = MappingUtil.getIndexName(getter);
this.caseSensitiveIndex = MappingUtil.caseSensitiveIndex(getter);
this.columnInfo = new ColumnInformation(getter);
this.genericJavaType = getter.getGenericReturnType();
this.javaType = getter.getReturnType();
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType, this.columnInfo.getColumnType());
this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType,
this.columnInfo.getColumnType(), metadata);
this.validators = MappingUtil.getValidators(getter);
}
@Override
public CasserEntity getEntity() {
public HelenusEntity getEntity() {
return entity;
}
@ -75,7 +79,7 @@ public final class CasserMappingProperty implements CasserProperty {
public Class<?> getJavaType() {
return (Class<?>) javaType;
}
@Override
public AbstractDataType getDataType() {
return dataType;
@ -85,7 +89,7 @@ public final class CasserMappingProperty implements CasserProperty {
public ColumnType getColumnType() {
return columnInfo.getColumnType();
}
@Override
public int getOrdinal() {
return columnInfo.getOrdinal();
@ -105,7 +109,12 @@ public final class CasserMappingProperty implements CasserProperty {
public Optional<IdentityName> getIndexName() {
return indexName;
}
@Override
public boolean caseSensitiveIndex() {
return caseSensitiveIndex;
}
@Override
public String getPropertyName() {
return propertyName;
@ -115,24 +124,24 @@ public final class CasserMappingProperty implements CasserProperty {
public Method getGetterMethod() {
return getter;
}
@Override
public Optional<Function<Object, Object>> getReadConverter(SessionRepository repository) {
if (readConverter == null) {
readConverter = abstractJavaType.resolveReadConverter(this.dataType, repository);
}
return readConverter;
}
@Override
public Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository) {
if (writeConverter == null) {
writeConverter = abstractJavaType.resolveWriteConverter(this.dataType, repository);
}
return writeConverter;
}
@ -143,9 +152,9 @@ public final class CasserMappingProperty implements CasserProperty {
@Override
public String toString() {
StringBuilder str = new StringBuilder();
String columnName = this.getColumnName().getName();
str.append(" ");
str.append(this.getDataType());
@ -156,43 +165,42 @@ public final class CasserMappingProperty implements CasserProperty {
str.append(columnName);
}
str.append(") ");
ColumnType type = this.getColumnType();
switch(type) {
case PARTITION_KEY:
str.append("partition_key[");
str.append(this.getOrdinal());
str.append("] ");
break;
case CLUSTERING_COLUMN:
str.append("clustering_column[");
str.append(this.getOrdinal());
str.append("] ");
OrderingDirection od = this.getOrdering();
if (od != null) {
str.append(od.name().toLowerCase()).append(" ");
}
break;
case STATIC_COLUMN:
str.append("static ");
break;
case COLUMN:
break;
switch (type) {
case PARTITION_KEY :
str.append("partition_key[");
str.append(this.getOrdinal());
str.append("] ");
break;
case CLUSTERING_COLUMN :
str.append("clustering_column[");
str.append(this.getOrdinal());
str.append("] ");
OrderingDirection od = this.getOrdering();
if (od != null) {
str.append(od.name().toLowerCase()).append(" ");
}
break;
case STATIC_COLUMN :
str.append("static ");
break;
case COLUMN :
break;
}
Optional<IdentityName> idx = this.getIndexName();
if (idx.isPresent()) {
str.append("index(").append(idx.get().getName()).append(") ");
str.append("index(").append(idx.get().getName()).append(") ");
}
return str.toString();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
@ -22,35 +22,37 @@ import java.util.function.Function;
import javax.validation.ConstraintValidator;
import com.noorq.casser.core.SessionRepository;
import com.noorq.casser.mapping.type.AbstractDataType;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType;
public interface CasserProperty {
public interface HelenusProperty {
CasserEntity getEntity();
HelenusEntity getEntity();
String getPropertyName();
String getPropertyName();
Method getGetterMethod();
IdentityName getColumnName();
Optional<IdentityName> getIndexName();
boolean caseSensitiveIndex();
Class<?> getJavaType();
AbstractDataType getDataType();
ColumnType getColumnType();
int getOrdinal();
OrderingDirection getOrdering();
Optional<Function<Object, Object>> getReadConverter(SessionRepository repository);
Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository);
ConstraintValidator<? extends Annotation, ?>[] getValidators();
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,16 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import com.noorq.casser.support.CqlUtil;
import net.helenus.support.CqlUtil;
public final class IdentityName {
private final String name;
private final boolean forceQuote;
public IdentityName(String name, boolean forceQuote) {
this.name = name.toLowerCase();
this.forceQuote = forceQuote;
@ -31,7 +31,7 @@ public final class IdentityName {
public static IdentityName of(String name, boolean forceQuote) {
return new IdentityName(name, forceQuote);
}
public String getName() {
return name;
}
@ -43,12 +43,11 @@ public final class IdentityName {
public String toCql(boolean overrideForceQuote) {
if (overrideForceQuote) {
return CqlUtil.forceQuote(name);
}
else {
} else {
return name;
}
}
public String toCql() {
return toCql(forceQuote);
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
@ -24,96 +24,90 @@ import java.util.Optional;
import javax.validation.Constraint;
import javax.validation.ConstraintValidator;
import com.noorq.casser.core.Casser;
import com.noorq.casser.core.Getter;
import com.noorq.casser.core.reflect.CasserPropertyNode;
import com.noorq.casser.core.reflect.DslExportable;
import com.noorq.casser.core.reflect.ListDsl;
import com.noorq.casser.core.reflect.MapDsl;
import com.noorq.casser.core.reflect.MapExportable;
import com.noorq.casser.core.reflect.ReflectionInstantiator;
import com.noorq.casser.core.reflect.SetDsl;
import com.noorq.casser.mapping.annotation.Index;
import com.noorq.casser.mapping.annotation.Table;
import com.noorq.casser.mapping.annotation.Tuple;
import com.noorq.casser.mapping.annotation.UDT;
import com.noorq.casser.support.CasserMappingException;
import com.noorq.casser.support.DslPropertyException;
import net.helenus.core.Getter;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.*;
import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Tuple;
import net.helenus.mapping.annotation.UDT;
import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException;
public final class MappingUtil {
@SuppressWarnings("unchecked")
public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0];
private MappingUtil() {
}
public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
List<ConstraintValidator<? extends Annotation, ?>> list = null;
for (Annotation constraintAnnotation : getterMethod.getDeclaredAnnotations()) {
list = addValidators(constraintAnnotation, list);
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
list = addValidators(possibleConstraint, list);
}
}
if (list == null) {
return EMPTY_VALIDATORS;
}
else {
} else {
return list.toArray(EMPTY_VALIDATORS);
}
}
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation, List<ConstraintValidator<? extends Annotation, ?>> list) {
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation,
List<ConstraintValidator<? extends Annotation, ?>> list) {
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
if (possibleConstraint instanceof Constraint) {
Constraint constraint = (Constraint) possibleConstraint;
Constraint constraint = (Constraint) possibleConstraint;
for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {
ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator.instantiateClass(clazz);
ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator
.instantiateClass(clazz);
((ConstraintValidator) validator).initialize(constraintAnnotation);
if (list == null) {
list = new ArrayList<ConstraintValidator<? extends Annotation, ?>>();
}
list.add(validator);
}
}
}
return list;
}
public static Optional<IdentityName> getIndexName(Method getterMethod) {
String indexName = null;
boolean forceQuote = false;
Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) {
indexName = index.value();
forceQuote = index.forceQuote();
@ -127,63 +121,68 @@ public final class MappingUtil {
return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty();
}
public static boolean caseSensitiveIndex(Method getterMethod) {
Index index = getterMethod.getDeclaredAnnotation(Index.class);
if (index != null) {
return index.caseSensitive();
}
return false;
}
public static String getPropertyName(Method getter) {
return getter.getName();
}
public static String getDefaultColumnName(Method getter) {
return Casser.settings().getPropertyToColumnConverter()
.apply(getPropertyName(getter));
return Helenus.settings().getPropertyToColumnConverter().apply(getPropertyName(getter));
}
public static IdentityName getUserDefinedTypeName(Class<?> iface, boolean required) {
String userTypeName = null;
boolean forceQuote = false;
UDT userDefinedType = iface
.getDeclaredAnnotation(UDT.class);
UDT userDefinedType = iface.getDeclaredAnnotation(UDT.class);
if (userDefinedType != null) {
userTypeName = userDefinedType.value();
forceQuote = userDefinedType.forceQuote();
if (userTypeName == null || userTypeName.isEmpty()) {
userTypeName = getDefaultEntityName(iface);
}
return new IdentityName(userTypeName, forceQuote);
}
}
if (required) {
throw new CasserMappingException(
"entity must have annotation @UserDefinedType " + iface);
throw new HelenusMappingException("entity must have annotation @UserDefinedType " + iface);
}
return null;
}
public static boolean isTuple(Class<?> iface) {
Tuple tuple = iface
.getDeclaredAnnotation(Tuple.class);
Tuple tuple = iface.getDeclaredAnnotation(Tuple.class);
return tuple != null;
}
public static boolean isUDT(Class<?> iface) {
UDT udt = iface
.getDeclaredAnnotation(UDT.class);
UDT udt = iface.getDeclaredAnnotation(UDT.class);
return udt != null;
}
public static IdentityName getTableName(Class<?> iface, boolean required) {
String tableName = null;
@ -196,8 +195,7 @@ public final class MappingUtil {
forceQuote = table.forceQuote();
} else if (required) {
throw new CasserMappingException(
"entity must have annotation @Table " + iface);
throw new HelenusMappingException("entity must have annotation @Table " + iface);
}
if (tableName == null || tableName.isEmpty()) {
@ -208,8 +206,7 @@ public final class MappingUtil {
}
public static String getDefaultEntityName(Class<?> iface) {
return Casser.settings().getPropertyToColumnConverter()
.apply(iface.getSimpleName());
return Helenus.settings().getPropertyToColumnConverter().apply(iface.getSimpleName());
}
public static Class<?> getMappingInterface(Object pojo) {
@ -220,53 +217,50 @@ public final class MappingUtil {
iface = (Class<?>) pojo;
if (!iface.isInterface()) {
throw new CasserMappingException("expected interface " + iface);
throw new HelenusMappingException("expected interface " + iface);
}
} else {
Class<?>[] ifaces = pojo.getClass().getInterfaces();
int len = ifaces.length;
for (int i = 0; i != len; ++i) {
iface = ifaces[0];
if (MapExportable.class.isAssignableFrom(iface)) {
continue;
}
if (iface.getDeclaredAnnotation(Table.class) != null ||
iface.getDeclaredAnnotation(UDT.class) != null ||
iface.getDeclaredAnnotation(Tuple.class) != null) {
if (iface.getDeclaredAnnotation(Table.class) != null || iface.getDeclaredAnnotation(UDT.class) != null
|| iface.getDeclaredAnnotation(Tuple.class) != null) {
break;
}
}
}
if (iface == null) {
throw new CasserMappingException("dsl interface not found for " + pojo);
throw new HelenusMappingException("dsl interface not found for " + pojo);
}
return iface;
}
public static CasserPropertyNode resolveMappingProperty(
Getter<?> getter) {
public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) {
try {
Object childDsl = getter.get();
if (childDsl instanceof DslExportable) {
DslExportable e = (DslExportable) childDsl;
return e.getParentDslCasserPropertyNode();
return e.getParentDslHelenusPropertyNode();
}
else if (childDsl instanceof MapDsl) {
MapDsl mapDsl = (MapDsl) childDsl;
return mapDsl.getParent();
@ -282,14 +276,12 @@ public final class MappingUtil {
return setDsl.getParent();
}
throw new CasserMappingException(
"getter must reference to the dsl object " + getter);
throw new HelenusMappingException("getter must reference to the dsl object " + getter);
} catch (DslPropertyException e) {
return e.getPropertyNode();
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import com.noorq.casser.support.CasserMappingException;
import net.helenus.support.HelenusMappingException;
public enum OrderingDirection {
@ -32,10 +32,9 @@ public enum OrderingDirection {
public String cql() {
return cql;
}
public static OrderingDirection parseString(String name) {
if (ASC.cql.equalsIgnoreCase(name)) {
return ASC;
}
@ -43,8 +42,8 @@ public enum OrderingDirection {
else if (DESC.cql.equalsIgnoreCase(name)) {
return DESC;
}
throw new CasserMappingException("invalid ordering direction name " + name);
throw new HelenusMappingException("invalid ordering direction name " + name);
}
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,23 +13,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping;
package net.helenus.mapping;
import java.util.Comparator;
public enum TypeAndOrdinalColumnComparator implements Comparator<CasserProperty> {
public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> {
INSTANCE;
public int compare(CasserProperty thisVal, CasserProperty anotherVal) {
public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {
int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
if (c == 0) {
c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());
}
return c;
}
}

View file

@ -0,0 +1,111 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.helenus.mapping.OrderingDirection;
/**
 * ClusteringColumn is the family column in the legacy Cassandra API.
 *
 * The purpose of this column is to add an additional dimension to the table.
 * Both @PartitionKey and @ClusteringColumn together form the primary key of
 * the table. The primary difference between them is that the first one is
 * used for routing purposes, in order to locate a data node in the cluster,
 * whereas the second one is used inside the node to locate a piece of data on
 * a concrete machine.
 *
 * A ClusteringColumn can be thought of as a key in a SortedMap that is fully
 * stored on a single node. Developers must choose clustering-column fields
 * carefully, because all data inside this SortedMap must fit on one node.
 *
 * A ClusteringColumn can have more than one part, and the order of the parts
 * is important. This order defines the way Cassandra joins the parts and
 * influences data retrieval operations. Each part can have an ordering
 * property that defines the default ascending or descending order of data. In
 * the case of two or more parts, select queries need a consistent order of
 * all parts as they are defined in the table.
 *
 * For example, if the first part has ASC ordering and the second is also ASC,
 * Cassandra will sort entries like this: a-a a-b b-a b-b. In this case we are
 * able to run the queries: ORDER BY first ASC, second ASC ORDER BY first
 * DESC, second DESC WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY
 * second DESC WHERE first=? AND second=?
 *
 * But we can not run the queries: ORDER BY first DESC, second ASC ORDER BY
 * first ASC, second DESC WHERE second=? ORDER BY first (ASC,DESC)
 *
 *
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface ClusteringColumn {

	/**
	 * Default value is the name of the method normalized to underscore
	 *
	 * @return name of the column
	 */
	String value() default "";

	/**
	 * ClusteringColumn parts must be ordered in the @Table. It is a requirement
	 * of Cassandra. Cassandra joins all parts into the final clustering key that
	 * is stored in the column family name. Additionally, all parts can have some
	 * ordering (ASC, DESC) that, together with the sequence of parts, determines
	 * the key comparison function, so Cassandra always stores column family
	 * names in sorted order.
	 *
	 * By default ordinal has the value 0, because in most cases a @Table has a
	 * single ClusteringColumn. If you have 2 or more parts of the
	 * ClusteringColumn, then you need to use ordinal() to define the sequence of
	 * the parts.
	 *
	 * @return number that used to sort clustering columns
	 */
	int ordinal() default 0;

	/**
	 * Default order of values in the ClusteringColumn. This ordering is used for
	 * comparison of the clustering column values when Cassandra stores them in
	 * sorted order.
	 *
	 * Default value is the ascending order
	 *
	 * @return ascending order or descending order of clustering column values
	 */
	OrderingDirection ordering() default OrderingDirection.ASC;

	/**
	 * For reserved words in Cassandra we need quotation in CQL queries. This
	 * property marks that the name of the UDT type needs to be quoted.
	 *
	 * Default value is false, we are quoting only selected names.
	 *
	 * @return true if name have to be quoted
	 */
	boolean forceQuote() default false;
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,61 +13,58 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
package net.helenus.mapping.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.*;
/**
* Column annotation is used to define additional properties of the column
* in entity mapping interfaces: @Table, @UDT, @Tuple
*
* Column annotation is used to define additional properties of the column in
* entity mapping interfaces: @Table, @UDT, @Tuple
*
* Column annotation can be used to override default name of the column or to
* setup order of the columns in the mapping
*
*
* Usually for @Table and @UDT types it is not important to define order of the
* columns, but in @Tuple mapping it is required, because tuple itself represents the
* sequence of the types with particular order in the table's column
*
* @author Alex Shvid
* columns, but in @Tuple mapping it is required, because tuple itself
* represents the sequence of the types with particular order in the table's
* column
*
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Column {
/**
* Default value is the name of the method normalized to underscore
*
* Default value is the name of the method normalized to underscore
*
* @return name of the column
*/
String value() default "";
/**
* Ordinal will be used for ascending sorting of columns
*
* Default value is 0, because not all mapping entities require all fields to have
* unique ordinals, only @Tuple mapping entity requires all of them to be unique.
*
*
* Default value is 0, because not all mapping entities require all fields to
* have unique ordinals, only @Tuple mapping entity requires all of them to be
* unique.
*
* @return number that used to sort columns, usually for @Tuple only
*/
int ordinal() default 0;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This property marks that
* the name of the UDT type needs to be quoted.
*
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the UDT type needs to be quoted.
*
* Default value is false, we are quoting only selected names.
*
*
* @return true if name have to be quoted
*/
boolean forceQuote() default false;
}

View file

@ -0,0 +1,270 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.annotation;
import java.lang.annotation.*;
import javax.validation.Constraint;
import net.helenus.mapping.validator.*;
/**
 * Constraint annotations are used to enforce data integrity, mostly
 * for @java.lang.String types. The annotations are placed on the particular
 * method in the model interface.
 *
 * None of them have any effect on selects and data retrieval operations.
 *
 * Supported types: - @NotNull supports any @java.lang.Object type - All
 * annotations support @java.lang.String type
 *
 *
 */
public final class Constraints {
// Container for the nested constraint annotations; never instantiated.
private Constraints() {
}
/**
 * NotNull annotation is used to check that a value is not null before storing it.
 *
 * Applicable to any @java.lang.Object
 *
 * It does not check on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotNullValidator.class)
public @interface NotNull {
}
/**
 * NotEmpty annotation is used to check that a value has text before storing it.
 *
 * Also checks for null, and so is a stricter annotation than @NotNull
 *
 * Can be used for @java.lang.CharSequence, @ByteBuffer and any array
 *
 * It does not check on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NotEmptyValidator.class)
public @interface NotEmpty {
}
/**
 * Email annotation is used to check that a value is a valid email address before
 * storing it.
 *
 * Can be used only for @CharSequence
 *
 * It does not check on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = EmailValidator.class)
public @interface Email {
}
/**
 * Number annotation is used to check that all characters in the value are digits
 * before storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not check on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = NumberValidator.class)
public @interface Number {
}
/**
 * Alphabet annotation is used to check that all characters in the value belong
 * to a specific alphabet before storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not check on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = AlphabetValidator.class)
public @interface Alphabet {
/**
 * Defines the alphabet that will be used to check the value
 *
 * @return alphabet characters in the string
 */
String value();
}
/**
 * Length annotation is used to ensure that a value has an exact length before
 * storing it.
 *
 * Can be used for @java.lang.CharSequence, @ByteBuffer and any array
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = LengthValidator.class)
public @interface Length {
// required exact length of the value
int value();
}
/**
 * MaxLength annotation is used to ensure that a value has a length less than or
 * equal to some threshold before storing it.
 *
 * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = MaxLengthValidator.class)
public @interface MaxLength {
// maximum allowed length of the value (inclusive)
int value();
}
/**
 * MinLength annotation is used to ensure that a value has a length greater than
 * or equal to some threshold before storing it.
 *
 * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(Retention.RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = MinLengthValidator.class)
public @interface MinLength {
// minimum allowed length of the value (inclusive)
int value();
}
/**
 * LowerCase annotation is used to ensure that a value is in lower case before
 * storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = LowerCaseValidator.class)
public @interface LowerCase {
}
/**
 * UpperCase annotation is used to ensure that a value is in upper case before
 * storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = UpperCaseValidator.class)
public @interface UpperCase {
}
/**
 * Pattern annotation is used to ensure that a value matches a user-defined
 * regular expression before storing it.
 *
 * Can be used only for @java.lang.CharSequence
 *
 * It does not have effect on selects and data retrieval operations
 *
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@Constraint(validatedBy = PatternValidator.class)
public @interface Pattern {
/**
 * User defined regex expression to check match of the value
 *
 * @return Java regex pattern
 */
String value();
/**
 * Regex flags composition
 *
 * @return Java regex flags
 */
int flags();
}
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,50 +13,57 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
package net.helenus.mapping.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.*;
/**
* Index annotation is using under the specific column or method in entity interface with @Table annotation.
*
* The corresponding secondary index will be created in the underline @Table for the specific column.
*
* Currently Cassandra supports only single column index, so this index works only for single column.
*
* Make sure that you are using low cardinality columns for this index, that is the requirement of the Cassandra.
* Low cardinality fields examples: gender, country, age, status and etc
* High cardinality fields examples: id, email, timestamp, UUID and etc
*
* @author Alex Shvid
* Index annotation is using under the specific column or method in entity
* interface with @Table annotation.
*
* The corresponding secondary index will be created in the underline @Table for
* the specific column.
*
* Currently Cassandra supports only single column index, so this index works
* only for single column.
*
* Make sure that you are using low cardinality columns for this index; that is
* a requirement of Cassandra. Low cardinality field examples: gender,
* country, age, status, etc. High cardinality field examples: id, email,
* timestamp, UUID, etc.
*
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Index {
/**
* Defined the name of the index. By default will be used the column name.
*
*
* @return name of the index
*/
String value() default "";
/**
* For reserved words in Cassandra we need quotation in CQL queries. This property marks that
* the name of the UDT type needs to be quoted.
*
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the UDT type needs to be quoted.
*
* Default value is false, we are quoting only selected names.
*
*
* @return true if name have to be quoted
*/
*/
boolean forceQuote() default false;
/**
* Controls whether a SASI index (Cassandra 3.x+) is created as case sensitive
* or case insensitive.
*
* @return true (the default) if the index is case sensitive; false to create a
*         case-insensitive index
*/
boolean caseSensitive() default true;
}

View file

@ -0,0 +1,32 @@
/*
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/**
 * Inherited Entity annotation
 *
 * InheritedTable annotation is used to indicate that the annotated interface's
 * methods should also be mapped for entity interfaces that extend it
 * (presumably via the standard @Inherited meta-annotation semantics — the
 * processing side is not visible here).
 *
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface InheritedTable {
// Default value is empty; semantics of a non-empty value are defined by the
// mapping layer (not visible in this file).
String value() default "";
}

View file

@ -1,5 +1,5 @@
/*
* Copyright (C) 2015 The Casser Authors
* Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noorq.casser.mapping.annotation;
package net.helenus.mapping.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -21,57 +21,58 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* PartitionKey annotation is using to define that particular column is the part of
* partition key in the table.
*
* Partition Key is the routing key. Cassandra is using it to find the primary data node
* in the cluster that holds data. Cassandra combines all parts of the partition key to
* byte array and then calculates hash function by using good distribution algorithm (by default
* MurMur3). After that it uses hash number as a token in the ring to find a virtual
* and then a physical data server.
*
* For @Table mapping entity it is required to have as minimum one PartitionKey column.
* For @UDT and @Tuple mapping entities @PartitionKey annotation is not using.
*
* @author Alex Shvid
* PartitionKey annotation is using to define that particular column is the part
* of partition key in the table.
*
* Partition Key is the routing key. Cassandra is using it to find the primary
* data node in the cluster that holds data. Cassandra combines all parts of the
* partition key to byte array and then calculates hash function by using good
* distribution algorithm (by default MurMur3). After that it uses hash number
* as a token in the ring to find a virtual and then a physical data server.
*
* For @Table mapping entity it is required to have as minimum one PartitionKey
* column. For @UDT and @Tuple mapping entities @PartitionKey annotation is not
* using.
*
*
*/
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface PartitionKey {
/**
* Default value is the name of the method normalized to underscore
*
* Default value is the name of the method normalized to underscore
*
* @return name of the column
*/
String value() default "";
/**
* PartitionKey parts must be ordered in the @Table. It is the requirement of Cassandra.
* That is how the partition key calculation works, column parts will be joined based on some order
* and final hash/token will be calculated.
*
* Be default ordinal has 0 value, that's because in most cases @Table have single column for @PartitionKey
* If you have 2 and more parts of the PartitionKey, then you need to use ordinal() to
* define the sequence of the parts
*
* PartitionKey parts must be ordered in the @Table. It is the requirement of
* Cassandra. That is how the partition key calculation works, column parts will
* be joined based on some order and final hash/token will be calculated.
*
* By default ordinal has the value 0, because in most cases a @Table has a
* single column for @PartitionKey. If you have 2 or more parts of the
* PartitionKey, then you need to use ordinal() to define the sequence of the
* parts.
*
* @return number that used to sort columns in PartitionKey
*/
int ordinal() default 0;
/**
* For reserved words in Cassandra we need quotation in CQL queries. This property marks that
* the name of the UDT type needs to be quoted.
*
* For reserved words in Cassandra we need quotation in CQL queries. This
* property marks that the name of the UDT type needs to be quoted.
*
* Default value is false, we are quoting only selected names.
*
*
* @return true if name have to be quoted
*/
boolean forceQuote() default false;
}

Some files were not shown because too many files have changed in this diff Show more