diff --git a/.idea/.name b/.idea/.name
new file mode 100644
index 0000000..cefa68f
--- /dev/null
+++ b/.idea/.name
@@ -0,0 +1 @@
+helenus-core
\ No newline at end of file
diff --git a/.idea/eclipseCodeFormatter.xml b/.idea/eclipseCodeFormatter.xml
new file mode 100644
index 0000000..4ae0669
--- /dev/null
+++ b/.idea/eclipseCodeFormatter.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/encodings.xml b/.idea/encodings.xml
new file mode 100644
index 0000000..b26911b
--- /dev/null
+++ b/.idea/encodings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..b2ae19e
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,3 @@
+ * Gregory Burd @gregburd github:gburd keybase:gregburd
+ * Alex Shvid
+
diff --git a/README.md b/README.md
index 20a29e8..ad29db1 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# casser
+# helenus
Fast and easy-to-use, functional-style Cassandra client for Java 8 and Scala 2.11.
Current status: first application in production (there may be more).
@@ -26,8 +26,8 @@ Latest release dependency:
```
-	<groupId>com.noorq.casser</groupId>
-	<artifactId>casser-core</artifactId>
+	<groupId>net.helenus</groupId>
+	<artifactId>helenus-core</artifactId>
 	<version>1.1.0_2.11</version>
@@ -37,8 +37,8 @@ Active development dependency for Scala 2.11:
```
-	<groupId>com.noorq.casser</groupId>
-	<artifactId>casser-core</artifactId>
+	<groupId>net.helenus</groupId>
+	<artifactId>helenus-core</artifactId>
 	<version>1.2.0_2.11-SNAPSHOT</version>
@@ -77,8 +77,8 @@ public interface Timeline {
Session initialization:
```
-Timeline timeline = Casser.dsl(Timeline.class);
-CasserSession session = Casser.init(getSession()).showCql().add(Timeline.class).autoCreateDrop().get();
+Timeline timeline = Helenus.dsl(Timeline.class);
+HelenusSession session = Helenus.init(getSession()).showCql().add(Timeline.class).autoCreateDrop().get();
```
Select example:
@@ -138,7 +138,7 @@ Abstract repository:
```
public interface AbstractRepository {
- CasserSession session();
+ HelenusSession session();
}
```
@@ -149,7 +149,7 @@ import scala.concurrent.Future;
public interface AccountRepository extends AbstractRepository {
- static final Account account = Casser.dsl(Account.class);
+ static final Account account = Helenus.dsl(Account.class);
static final String DEFAULT_TIMEZONE = "America/Los_Angeles";
diff --git a/helenus-core.iml b/helenus-core.iml
new file mode 100644
index 0000000..64d67d0
--- /dev/null
+++ b/helenus-core.iml
@@ -0,0 +1,99 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 3047e07..0566104 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,410 +1,418 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>com.noorq.casser</groupId>
-	<artifactId>casser-core</artifactId>
-	<version>1.2.0_2.11-SNAPSHOT</version>
-	<packaging>jar</packaging>
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>net.helenus</groupId>
+	<artifactId>helenus-core</artifactId>
+	<version>2.0.0-SNAPSHOT</version>
+	<packaging>jar</packaging>
-	<name>casser</name>
-	<description>Casser Cassandra Client</description>
-	<url>https://github.com/noorq/casser</url>
+	<name>helenus</name>
+	<description>Helenus Cassandra Client</description>
+	<url>https://helenus.net/</url>
-	<licenses>
-		<license>
-			<name>The Apache Software License, Version 2.0</name>
-			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-			<distribution>repo</distribution>
-		</license>
-	</licenses>
+	<licenses>
+		<license>
+			<name>The Apache Software License, Version 2.0</name>
+			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+			<distribution>repo</distribution>
+		</license>
+	</licenses>
-	<scm>
-		<url>https://github.com/noorq/casser</url>
-		<connection>scm:git:git@github.com:noorq/casser.git</connection>
-		<developerConnection>scm:git:git@github.com:noorq/casser.git</developerConnection>
-	</scm>
+	<scm>
+		<url>https://github.com/gburd/helenus</url>
+		<connection>scm:git:git@github.com:gburd/helenus.git</connection>
+		<developerConnection>scm:git:git@github.com:gburd/helenus.git</developerConnection>
+	</scm>
-	<issueManagement>
-		<system>GitHub</system>
-		<url>https://github.com/noorq/casser/issues</url>
-	</issueManagement>
+	<issueManagement>
+		<system>GitHub</system>
+		<url>https://github.com/gburd/helenus/issues</url>
+	</issueManagement>
-	<parent>
-		<groupId>org.sonatype.oss</groupId>
-		<artifactId>oss-parent</artifactId>
-		<version>7</version>
-	</parent>
+	<parent>
+		<groupId>org.sonatype.oss</groupId>
+		<artifactId>oss-parent</artifactId>
+		<version>7</version>
+	</parent>
-
- casser
- UTF-8
-
- 2.0.2.2
- 2.1.5
- 2.1.4
- 16.0.1
-
- 1.3
- 2.1
- 4.11
- 0.2.5
- 1.7.1
- 1.0.11
- 1.9.5
- 1.9.13
-
-
+
+ helenus
+ UTF-8
+
-
- release
+
+ release
-
+
-
+
-
- org.apache.maven.plugins
- maven-gpg-plugin
- 1.6
-
-
- sign-artifacts
- verify
-
- sign
-
-
-
-
+
+ org.apache.maven.plugins
+ maven-gpg-plugin
+ 1.6
+
+
+ sign-artifacts
+ verify
+
+ sign
+
+
+
+
-
+
-
+
-
+
-
+
-
- org.scala-lang
- scala-library
- 2.11.6
-
+
+ org.scala-lang
+ scala-library
+ 2.13.0-M1
+
-
- com.datastax.cassandra
- cassandra-driver-core
- ${cassandra-driver-core.version}
-
-
- slf4j-log4j12
- org.slf4j
-
-
- slf4j-log4j12
- org.slf4j
-
-
- guava
- com.google.guava
-
-
-
+
+ com.datastax.cassandra
+ cassandra-driver-core
+ 3.3.0
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ guava
+ com.google.guava
+
+
+
-
- com.google.guava
- guava
- ${guava.version}
-
+
+ org.aspectj
+ aspectjrt
+ 1.8.10
+
-
-
- javax.validation
- validation-api
- 1.1.0.Final
-
+
+ org.aspectj
+ aspectjweaver
+ 1.8.10
+
-
+
+ org.apache.commons
+ commons-lang3
+ 3.6
+
-
- org.codehaus.jackson
- jackson-mapper-asl
- ${jackson}
- test
-
+
+ org.springframework
+ spring-core
+ 4.3.10.RELEASE
+
-
- org.codehaus.jackson
- jackson-core-asl
- ${jackson}
- test
-
+
+ com.google.guava
+ guava
+ 16.0.1
+
-
- org.cassandraunit
- cassandra-unit
- ${cassandra-unit.version}
- test
-
-
- com.datastax.cassandra
- cassandra-driver-core
-
-
-
+
+
+ javax.validation
+ validation-api
+ 2.0.0.CR3
+
-
- org.apache.cassandra
- cassandra-all
- ${cassandra}
- test
-
-
- org.slf4j
- slf4j-log4j12
-
-
- ch.qos.logback
- logback-core
-
-
- ch.qos.logback
- logback-classic
-
-
- com.google.guava
- guava
-
-
-
+
-
- commons-io
- commons-io
- 2.4
- test
-
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+ 1.9.13
+ test
+
-
- junit
- junit
- ${junit}
- test
-
+
+ org.codehaus.jackson
+ jackson-core-asl
+ 1.9.13
+ test
+
-
- com.github.stephenc
- jamm
- ${jamm}
- test
-
+
+ org.cassandraunit
+ cassandra-unit
+ 3.1.3.2
+ test
+
+
+ com.datastax.cassandra
+ cassandra-driver-core
+
+
+
-
- org.hamcrest
- hamcrest-library
- ${hamcrest}
- test
-
+
+ org.apache.cassandra
+ cassandra-all
+ 3.11.0
+ test
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ ch.qos.logback
+ logback-core
+
+
+ ch.qos.logback
+ logback-classic
+
+
+ com.google.guava
+ guava
+
+
+
-
- org.hamcrest
- hamcrest-core
- ${hamcrest}
- test
-
+
+ commons-io
+ commons-io
+ 2.5
+ test
+
-
- org.mockito
- mockito-core
- ${mockito}
- test
-
+
+ junit
+ junit
+ 4.12
+ test
+
-
-
- org.slf4j
- slf4j-api
- ${slf4j}
-
+
+ com.github.stephenc
+ jamm
+ 0.2.5
+ test
+
-
- org.slf4j
- jcl-over-slf4j
- ${slf4j}
- runtime
-
+
+ org.hamcrest
+ hamcrest-library
+ 1.3
+ test
+
-
+
+ org.hamcrest
+ hamcrest-core
+ 1.3
+ test
+
-
+
+ org.mockito
+ mockito-core
+ 2.8.47
+ test
+
-
-
- src/test/resources
- true
-
- **/*
-
-
-
+
+
+ org.slf4j
+ slf4j-api
+ 1.7.1
+
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 3.1
-
-
- 1.8
- 1.8
- 1.8
-
-
-
- org.apache.maven.plugins
- maven-source-plugin
- 2.2.1
-
-
- attach-sources
-
- jar
-
-
-
-
-
- org.apache.maven.plugins
- maven-javadoc-plugin
- 2.9.1
-
- true
- true
- true
- target/javadoc
- casser.*
-
- true
-
-
-
- attach-javadocs
-
- jar
-
-
-
-
-
- org.apache.maven.plugins
- maven-eclipse-plugin
- 2.8
-
- true
- true
- 2.0
-
-
-
- org.apache.maven.plugins
- maven-dependency-plugin
- 2.8
-
-
- org.codehaus.mojo
- build-helper-maven-plugin
- 1.8
-
-
- reserve-network-port
-
- reserve-network-port
-
- process-resources
-
-
- build.cassandra.native_transport_port
- build.cassandra.rpc_port
- build.cassandra.storage_port
- build.cassandra.ssl_storage_port
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 2.18.1
-
- methods
- 10
- false
-
- **/test/unit/**/*.java
-
-
- **/test/integration/**/*.java
- **/test/performance/**/*.java
-
-
- src/test/resources/logging.properties
-
-
-
-
- org.bitstrings.maven.plugins
- dependencypath-maven-plugin
- 1.1.1
-
-
- set-all
-
- set
-
-
-
-
-
- org.apache.maven.plugins
- maven-failsafe-plugin
- 2.16
-
- 1
- -Xmx1024m -Xss512m
- -javaagent:${com.github.stephenc:jamm:jar}
- true
- false
-
- **/test/integration/**/*.java
-
-
- **/test/unit/**/*.java
- **/test/performance/**/*.java
-
-
- src/test/resources/logging.properties
- true
-
-
-
-
-
- integration-test
- verify
-
-
-
-
+
+ org.slf4j
+ jcl-over-slf4j
+ 1.7.1
+ runtime
+
-
-
+
+
+
+
+
+ src/test/resources
+ true
+
+ **/*
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.1
+
+
+ 1.8
+ 1.8
+ 1.8
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 2.2.1
+
+
+ attach-sources
+
+ jar
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ 2.9.1
+
+ true
+ true
+ true
+ target/javadoc
+
+ true
+
+
+
+ attach-javadocs
+
+ jar
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-eclipse-plugin
+ 2.8
+
+ true
+ true
+ 2.0
+
+
+
+ org.apache.maven.plugins
+ maven-dependency-plugin
+ 2.8
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 1.8
+
+
+ reserve-network-port
+
+ reserve-network-port
+
+ process-resources
+
+
+ build.cassandra.native_transport_port
+ build.cassandra.rpc_port
+ build.cassandra.storage_port
+ build.cassandra.ssl_storage_port
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 2.18.1
+
+ methods
+ 10
+ false
+
+ **/test/unit/**/*.java
+
+
+ **/test/integration/**/*.java
+ **/test/performance/**/*.java
+
+
+ src/test/resources/logging.properties
+
+
+
+
+
+ org.bitstrings.maven.plugins
+ dependencypath-maven-plugin
+ 1.1.1
+
+
+ set-all
+
+ set
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+ 2.16
+
+ 1
+
+ true
+ false
+
+ **/test/integration/**/*.java
+
+
+ **/test/unit/**/*.java
+ **/test/performance/**/*.java
+
+
+ src/test/resources/logging.properties
+
+ true
+
+
+
+
+
+ integration-test
+ verify
+
+
+
+
+
+
+
diff --git a/src/main/java/com/datastax/driver/core/DefaultMetadata.java b/src/main/java/com/datastax/driver/core/DefaultMetadata.java
new file mode 100644
index 0000000..c343e29
--- /dev/null
+++ b/src/main/java/com/datastax/driver/core/DefaultMetadata.java
@@ -0,0 +1,22 @@
+package com.datastax.driver.core;
+
+import java.util.Arrays;
+import java.util.List;
+
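+/**
+ * Minimal Metadata subclass that can be instantiated without a live cluster
+ * (it passes a null Cluster.Manager to the package-private Metadata constructor)
+ * and that builds TupleType instances using the newest supported protocol
+ * version and the default codec registry.
+ */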
+public class DefaultMetadata extends Metadata {
+
+ public DefaultMetadata() { super(null); }
+
+ private DefaultMetadata(Cluster.Manager cluster) {
+ super(cluster);
+ }
+
+ public TupleType newTupleType(DataType... types) {
+ return newTupleType(Arrays.asList(types));
+ }
+
+ public TupleType newTupleType(List<DataType> types) {
+ return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
+ }
+
+}
diff --git a/src/main/java/com/datastax/driver/core/schemabuilder/CreateCustomIndex.java b/src/main/java/com/datastax/driver/core/schemabuilder/CreateCustomIndex.java
new file mode 100644
index 0000000..d41b6f2
--- /dev/null
+++ b/src/main/java/com/datastax/driver/core/schemabuilder/CreateCustomIndex.java
@@ -0,0 +1,132 @@
+package com.datastax.driver.core.schemabuilder;
+
+import com.google.common.base.Optional;
+
+import static com.datastax.driver.core.schemabuilder.SchemaStatement.STATEMENT_START;
+import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotEmpty;
+import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotKeyWord;
+
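+/**
+ * A CREATE CUSTOM INDEX statement builder modeled on the driver's CreateIndex;
+ * subclasses supply the index class name and its options via getCustomClassName()
+ * and getOptions() (see CreateSasiIndex in this change for an example).
+ */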
+public class CreateCustomIndex extends CreateIndex {
+
+ private String indexName;
+ private boolean ifNotExists = false;
+ private Optional<String> keyspaceName = Optional.absent();
+ private String tableName;
+ private String columnName;
+ private boolean keys;
+
+ CreateCustomIndex(String indexName) {
+ super(indexName);
+ validateNotEmpty(indexName, "Index name");
+ validateNotKeyWord(indexName, String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
+ this.indexName = indexName;
+ }
+
+ /**
+ * Add the 'IF NOT EXISTS' condition to this CREATE INDEX statement.
+ *
+ * @return this CREATE INDEX statement.
+ */
+ public CreateIndex ifNotExists() {
+ this.ifNotExists = true;
+ return this;
+ }
+
+ /**
+ * Specify the keyspace and table to create the index on.
+ *
+ * @param keyspaceName the keyspace name.
+ * @param tableName the table name.
+ * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
+ */
+ public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
+ validateNotEmpty(keyspaceName, "Keyspace name");
+ validateNotEmpty(tableName, "Table name");
+ validateNotKeyWord(keyspaceName, String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
+ validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+ this.keyspaceName = Optional.fromNullable(keyspaceName);
+ this.tableName = tableName;
+ return new CreateCustomIndex.CreateIndexOn();
+ }
+
+ /**
+ * Specify the table to create the index on.
+ *
+ * @param tableName the table name.
+ * @return a {@link CreateIndex.CreateIndexOn} that will allow the specification of the column.
+ */
+ public CreateIndex.CreateIndexOn onTable(String tableName) {
+ validateNotEmpty(tableName, "Table name");
+ validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+ this.tableName = tableName;
+ return new CreateCustomIndex.CreateIndexOn();
+ }
+
+ public class CreateIndexOn extends CreateIndex.CreateIndexOn {
+ /**
+ * Specify the column to create the index on.
+ *
+ * @param columnName the column name.
+ * @return the final CREATE INDEX statement.
+ */
+ public SchemaStatement andColumn(String columnName) {
+ validateNotEmpty(columnName, "Column name");
+ validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+ CreateCustomIndex.this.columnName = columnName;
+ return SchemaStatement.fromQueryString(buildInternal());
+ }
+
+ /**
+ * Create an index on the keys of the given map column.
+ *
+ * @param columnName the column name.
+ * @return the final CREATE INDEX statement.
+ */
+ public SchemaStatement andKeysOfColumn(String columnName) {
+ validateNotEmpty(columnName, "Column name");
+ validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+ CreateCustomIndex.this.columnName = columnName;
+ CreateCustomIndex.this.keys = true;
+ return SchemaStatement.fromQueryString(buildInternal());
+ }
+ }
+
+ String getCustomClassName() { return ""; }
+ String getOptions() { return ""; }
+
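+ // Produces CQL of the form (illustrative):
+ //   CREATE CUSTOM INDEX [IF NOT EXISTS] <idx> ON [<keyspace>.]<table>(<column>)
+ //     USING '<custom class>' WITH OPTIONS = { <options> }
+ // where the column is wrapped in KEYS(...) when indexing the keys of a map.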
+ @Override
+ public String buildInternal() {
+ StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
+
+ if (ifNotExists) {
+ createStatement.append("IF NOT EXISTS ");
+ }
+
+ createStatement.append(indexName).append(" ON ");
+
+ if (keyspaceName.isPresent()) {
+ createStatement.append(keyspaceName.get()).append(".");
+ }
+ createStatement.append(tableName);
+
+ createStatement.append("(");
+ if (keys) {
+ createStatement.append("KEYS(");
+ }
+
+ createStatement.append(columnName);
+
+ if (keys) {
+ createStatement.append(")");
+ }
+ createStatement.append(")");
+
+ createStatement.append(" USING '");
+ createStatement.append(getCustomClassName());
+ createStatement.append("' WITH OPTIONS = {");
+ createStatement.append(getOptions());
+ createStatement.append(" }");
+
+ return createStatement.toString();
+ }
+}
diff --git a/src/main/java/com/datastax/driver/core/schemabuilder/CreateSasiIndex.java b/src/main/java/com/datastax/driver/core/schemabuilder/CreateSasiIndex.java
new file mode 100644
index 0000000..f5f5a69
--- /dev/null
+++ b/src/main/java/com/datastax/driver/core/schemabuilder/CreateSasiIndex.java
@@ -0,0 +1,18 @@
+package com.datastax.driver.core.schemabuilder;
+
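+/**
+ * CreateCustomIndex variant for SASI; for example (illustrative), new
+ * CreateSasiIndex("idx").ifNotExists().onTable("ks", "tbl").andColumn("col")
+ * yields roughly:
+ *   CREATE CUSTOM INDEX IF NOT EXISTS idx ON ks.tbl(col)
+ *   USING 'org.apache.cassandra.index.sasi.SASIIndex'
+ *   WITH OPTIONS = {'analyzer_class':
+ *     'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer',
+ *     'case_sensitive': 'false' }
+ */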
+public class CreateSasiIndex extends CreateCustomIndex {
+
+ public CreateSasiIndex(String indexName) {
+ super(indexName);
+ }
+
+ String getCustomClassName() {
+ return "org.apache.cassandra.index.sasi.SASIIndex";
+ }
+
+ String getOptions() {
+ return "'analyzer_class': "
+ + "'org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer', "
+ + "'case_sensitive': 'false'";
+ }
+}
diff --git a/src/main/java/com/datastax/driver/core/schemabuilder/CreateTable.java b/src/main/java/com/datastax/driver/core/schemabuilder/CreateTable.java
new file mode 100644
index 0000000..00bebe2
--- /dev/null
+++ b/src/main/java/com/datastax/driver/core/schemabuilder/CreateTable.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2012-2017 DataStax Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datastax.driver.core.schemabuilder;
+
+import com.datastax.driver.core.CodecRegistry;
+
+/**
+ * A built CREATE TABLE statement.
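+ * Exposes the generated CQL via getQueryString(CodecRegistry) and toString(),
+ * both of which delegate to Create.buildInternal().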
+ */
+public class CreateTable extends Create {
+
+ public CreateTable(String keyspaceName, String tableName) {
+ super(keyspaceName, tableName);
+ }
+
+ public CreateTable(String tableName) {
+ super(tableName);
+ }
+
+ public String getQueryString(CodecRegistry codecRegistry) {
+ return buildInternal();
+ }
+
+ public String toString() {
+ return buildInternal();
+ }
+}
diff --git a/src/main/java/com/noorq/casser/core/Casser.java b/src/main/java/com/noorq/casser/core/Casser.java
deleted file mode 100644
index 1c53655..0000000
--- a/src/main/java/com/noorq/casser/core/Casser.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.core;
-
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Session;
-import com.noorq.casser.config.CasserSettings;
-import com.noorq.casser.config.DefaultCasserSettings;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
-import com.noorq.casser.core.reflect.DslExportable;
-import com.noorq.casser.mapping.CasserEntity;
-import com.noorq.casser.support.CasserMappingException;
-
-
-
-public final class Casser {
-
- private static volatile CasserSettings settings = new DefaultCasserSettings();
-
-	private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
-
- private static volatile CasserSession session;
-
- private Casser() {
- }
-
- public static CasserSession session() {
- return Objects.requireNonNull(session, "session is not initialized");
- }
-
- protected static void setSession(CasserSession newSession) {
- session = newSession;
- }
-
- public static void shutdown() {
- if (session != null) {
- session.close();
- }
- session = null;
- dslCache.clear();
- }
-
- public static CasserSettings settings() {
- return settings;
- }
-
- public static CasserSettings settings(CasserSettings overrideSettings) {
- CasserSettings old = settings;
- settings = overrideSettings;
- return old;
- }
-
- public static SessionInitializer connect(Cluster cluster) {
- Session session = cluster.connect();
- return new SessionInitializer(session);
- }
-
- public static SessionInitializer connect(Cluster cluster, String keyspace) {
- Session session = cluster.connect(keyspace);
- return new SessionInitializer(session);
- }
-
- public static SessionInitializer init(Session session) {
-
- if (session == null) {
- throw new IllegalArgumentException("empty session");
- }
-
- return new SessionInitializer(session);
- }
-
- public static void clearDslCache() {
- dslCache.clear();
- }
-
-	public static <E> E dsl(Class<E> iface) {
-		return dsl(iface, iface.getClassLoader(), Optional.empty());
-	}
-
-	public static <E> E dsl(Class<E> iface, ClassLoader classLoader) {
-		return dsl(iface, classLoader, Optional.empty());
-	}
-
-	public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent) {
-
- Object instance = null;
-
- if (!parent.isPresent()) {
- instance = dslCache.get(iface);
- }
-
- if (instance == null) {
-
- instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent);
-
- if (!parent.isPresent()) {
-
- Object c = dslCache.putIfAbsent(iface, instance);
- if (c != null) {
- instance = c;
- }
-
- }
- }
-
- return (E) instance;
- }
-
-	public static <E> E map(Class<E> iface, Map<String, Object> src) {
-		return map(iface, src, iface.getClassLoader());
-	}
-
-	public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
-		return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
- }
-
-	public static CasserEntity entity(Class<?> iface) {
-
- Object dsl = dsl(iface);
-
- DslExportable e = (DslExportable) dsl;
-
- return e.getCasserMappingEntity();
- }
-
- public static CasserEntity resolve(Object ifaceOrDsl) {
-
- if (ifaceOrDsl == null) {
- throw new CasserMappingException("ifaceOrDsl is null");
- }
-
- if (ifaceOrDsl instanceof DslExportable) {
-
- DslExportable e = (DslExportable) ifaceOrDsl;
-
- return e.getCasserMappingEntity();
- }
-
- if (ifaceOrDsl instanceof Class) {
-
-			Class<?> iface = (Class<?>) ifaceOrDsl;
-
- if (!iface.isInterface()) {
- throw new CasserMappingException("class is not an interface " + iface);
- }
-
- return entity(iface);
-
- }
-
- throw new CasserMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
- }
-
-}
diff --git a/src/main/java/com/noorq/casser/core/Mappers.java b/src/main/java/com/noorq/casser/core/Mappers.java
deleted file mode 100644
index 212a8b4..0000000
--- a/src/main/java/com/noorq/casser/core/Mappers.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.core;
-
-import java.util.function.Function;
-
-import com.datastax.driver.core.Row;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
-import com.noorq.casser.mapping.CasserProperty;
-import com.noorq.casser.mapping.value.ColumnValueProvider;
-import com.noorq.casser.support.Fun;
-
-public final class Mappers {
-
- private Mappers() {
- }
-
- public final static class Mapper1 implements Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1;
-
- public Mapper1(ColumnValueProvider provider, CasserPropertyNode p1) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- }
-
- @Override
- public Fun.Tuple1 apply(Row row) {
- return new Fun.Tuple1(provider.getColumnValue(row, 0, p1));
- }
- }
-
- public final static class Mapper2 implements Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1;
- private final CasserProperty p2;
-
- public Mapper2(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- }
-
- @Override
- public Fun.Tuple2 apply(Row row) {
- return new Fun.Tuple2(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2));
- }
- }
-
- public final static class Mapper3 implements Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1;
- private final CasserProperty p2;
- private final CasserProperty p3;
-
- public Mapper3(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2,
- CasserPropertyNode p3) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- this.p3 = p3.getProperty();
- }
-
- @Override
- public Fun.Tuple3 apply(Row row) {
- return new Fun.Tuple3(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2),
- provider.getColumnValue(row, 2, p3)
- );
- }
- }
-
- public final static class Mapper4 implements Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1;
- private final CasserProperty p2;
- private final CasserProperty p3;
- private final CasserProperty p4;
-
- public Mapper4(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2,
- CasserPropertyNode p3,
- CasserPropertyNode p4
- ) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- this.p3 = p3.getProperty();
- this.p4 = p4.getProperty();
- }
-
- @Override
- public Fun.Tuple4 apply(Row row) {
- return new Fun.Tuple4(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2),
- provider.getColumnValue(row, 2, p3),
- provider.getColumnValue(row, 3, p4)
- );
- }
- }
-
- public final static class Mapper5 implements Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1, p2, p3, p4, p5;
-
- public Mapper5(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2,
- CasserPropertyNode p3,
- CasserPropertyNode p4,
- CasserPropertyNode p5
- ) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- this.p3 = p3.getProperty();
- this.p4 = p4.getProperty();
- this.p5 = p5.getProperty();
- }
-
- @Override
- public Fun.Tuple5 apply(Row row) {
- return new Fun.Tuple5(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2),
- provider.getColumnValue(row, 2, p3),
- provider.getColumnValue(row, 3, p4),
- provider.getColumnValue(row, 4, p5)
- );
- }
- }
-
-
- public final static class Mapper6 implements
- Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1, p2, p3, p4, p5, p6;
-
- public Mapper6(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2,
- CasserPropertyNode p3,
- CasserPropertyNode p4,
- CasserPropertyNode p5,
- CasserPropertyNode p6
- ) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- this.p3 = p3.getProperty();
- this.p4 = p4.getProperty();
- this.p5 = p5.getProperty();
- this.p6 = p6.getProperty();
- }
-
- @Override
- public Fun.Tuple6 apply(Row row) {
- return new Fun.Tuple6(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2),
- provider.getColumnValue(row, 2, p3),
- provider.getColumnValue(row, 3, p4),
- provider.getColumnValue(row, 4, p5),
- provider.getColumnValue(row, 5, p6)
- );
- }
- }
-
- public final static class Mapper7 implements
- Function> {
-
- private final ColumnValueProvider provider;
- private final CasserProperty p1, p2, p3, p4, p5, p6, p7;
-
- public Mapper7(ColumnValueProvider provider,
- CasserPropertyNode p1,
- CasserPropertyNode p2,
- CasserPropertyNode p3,
- CasserPropertyNode p4,
- CasserPropertyNode p5,
- CasserPropertyNode p6,
- CasserPropertyNode p7
- ) {
- this.provider = provider;
- this.p1 = p1.getProperty();
- this.p2 = p2.getProperty();
- this.p3 = p3.getProperty();
- this.p4 = p4.getProperty();
- this.p5 = p5.getProperty();
- this.p6 = p6.getProperty();
- this.p7 = p7.getProperty();
- }
-
- @Override
- public Fun.Tuple7 apply(Row row) {
- return new Fun.Tuple7(
- provider.getColumnValue(row, 0, p1),
- provider.getColumnValue(row, 1, p2),
- provider.getColumnValue(row, 2, p3),
- provider.getColumnValue(row, 3, p4),
- provider.getColumnValue(row, 4, p5),
- provider.getColumnValue(row, 5, p6),
- provider.getColumnValue(row, 6, p7)
- );
- }
- }
-
-}
diff --git a/src/main/java/com/noorq/casser/core/Postulate.java b/src/main/java/com/noorq/casser/core/Postulate.java
deleted file mode 100644
index 8d7b033..0000000
--- a/src/main/java/com/noorq/casser/core/Postulate.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.core;
-
-import java.util.Arrays;
-
-import com.datastax.driver.core.querybuilder.Clause;
-import com.datastax.driver.core.querybuilder.QueryBuilder;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
-import com.noorq.casser.mapping.value.ColumnValuePreparer;
-import com.noorq.casser.support.CasserMappingException;
-
-public final class Postulate<V> {
-
- private final Operator operator;
- private final V[] values;
-
- protected Postulate(Operator op, V[] values) {
- this.operator = op;
- this.values = values;
- }
-
-	public static <V> Postulate<V> of(Operator op, V... values) {
-		return new Postulate<V>(op, values);
- }
-
- public Clause getClause(CasserPropertyNode node, ColumnValuePreparer valuePreparer) {
-
- switch(operator) {
-
- case EQ:
- return QueryBuilder.eq(node.getColumnName(),
- valuePreparer.prepareColumnValue(values[0], node.getProperty()));
-
- case IN:
- Object[] preparedValues = new Object[values.length];
- for (int i = 0; i != values.length; ++i) {
- preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
- }
- return QueryBuilder.in(node.getColumnName(), preparedValues);
-
- case LT:
- return QueryBuilder.lt(node.getColumnName(),
- valuePreparer.prepareColumnValue(values[0], node.getProperty()));
-
- case LTE:
- return QueryBuilder.lte(node.getColumnName(),
- valuePreparer.prepareColumnValue(values[0], node.getProperty()));
-
- case GT:
- return QueryBuilder.gt(node.getColumnName(),
- valuePreparer.prepareColumnValue(values[0], node.getProperty()));
-
- case GTE:
- return QueryBuilder.gte(node.getColumnName(),
- valuePreparer.prepareColumnValue(values[0], node.getProperty()));
-
- default:
- throw new CasserMappingException("unknown filter operation " + operator);
- }
-
- }
-
- @Override
- public String toString() {
-
- if (operator == Operator.IN) {
-
- if (values == null) {
- return "in()";
- }
-
- int len = values.length;
- StringBuilder b = new StringBuilder();
- b.append("in(");
- for (int i = 0; i != len; i++) {
- if (b.length() > 3) {
- b.append(", ");
- }
- b.append(String.valueOf(values[i]));
- }
- return b.append(')').toString();
-
- }
-
- return operator.getName() + values[0];
-
- }
-
-
-}
diff --git a/src/main/java/com/noorq/casser/core/SchemaUtil.java b/src/main/java/com/noorq/casser/core/SchemaUtil.java
deleted file mode 100644
index f77036f..0000000
--- a/src/main/java/com/noorq/casser/core/SchemaUtil.java
+++ /dev/null
@@ -1,378 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.core;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-import com.datastax.driver.core.ColumnMetadata;
-import com.datastax.driver.core.ColumnMetadata.IndexMetadata;
-import com.datastax.driver.core.DataType;
-import com.datastax.driver.core.RegularStatement;
-import com.datastax.driver.core.SimpleStatement;
-import com.datastax.driver.core.TableMetadata;
-import com.datastax.driver.core.UserType;
-import com.datastax.driver.core.schemabuilder.Alter;
-import com.datastax.driver.core.schemabuilder.Create;
-import com.datastax.driver.core.schemabuilder.Create.Options;
-import com.datastax.driver.core.schemabuilder.CreateType;
-import com.datastax.driver.core.schemabuilder.SchemaBuilder;
-import com.datastax.driver.core.schemabuilder.SchemaStatement;
-import com.noorq.casser.mapping.CasserEntity;
-import com.noorq.casser.mapping.CasserEntityType;
-import com.noorq.casser.mapping.CasserProperty;
-import com.noorq.casser.mapping.ColumnType;
-import com.noorq.casser.mapping.OrderingDirection;
-import com.noorq.casser.mapping.type.OptionalColumnMetadata;
-import com.noorq.casser.support.CasserMappingException;
-import com.noorq.casser.support.CqlUtil;
-
-public final class SchemaUtil {
-
- private SchemaUtil() {
- }
-
- public static RegularStatement use(String keyspace, boolean forceQuote) {
- if (forceQuote) {
- return new SimpleStatement("USE" + CqlUtil.forceQuote(keyspace));
- }
- else {
- return new SimpleStatement("USE " + keyspace);
- }
- }
-
- public static SchemaStatement createUserType(CasserEntity entity) {
-
- if (entity.getType() != CasserEntityType.UDT) {
- throw new CasserMappingException("expected UDT entity " + entity);
- }
-
- CreateType create = SchemaBuilder.createType(entity.getName().toCql());
-
- for (CasserProperty prop : entity.getOrderedProperties()) {
-
- ColumnType columnType = prop.getColumnType();
-
- if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
- throw new CasserMappingException("primary key columns are not supported in UserDefinedType for " + prop.getPropertyName() + " in entity " + entity);
- }
-
- try {
- prop.getDataType().addColumn(create, prop.getColumnName());
- }
- catch(IllegalArgumentException e) {
- throw new CasserMappingException("invalid column name '" + prop.getColumnName() + "' in entity '" + entity.getName().getName() + "'", e);
- }
- }
-
- return create;
- }
-
- public static List alterUserType(UserType userType,
- CasserEntity entity, boolean dropUnusedColumns) {
-
- if (entity.getType() != CasserEntityType.UDT) {
- throw new CasserMappingException("expected UDT entity " + entity);
- }
-
- List result = new ArrayList();
-
- /**
- * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will exist
- */
-
- Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
-
- final Set visitedColumns = dropUnusedColumns ? new HashSet()
- : Collections. emptySet();
-
- for (CasserProperty prop : entity.getOrderedProperties()) {
-
- String columnName = prop.getColumnName().getName();
-
- if (dropUnusedColumns) {
- visitedColumns.add(columnName);
- }
-
- ColumnType columnType = prop.getColumnType();
-
- if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
- continue;
- }
-
- DataType dataType = userType.getFieldType(columnName);
- SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));
-
- if (stmt != null) {
- result.add(stmt);
- }
-
- }
-
- if (dropUnusedColumns) {
- for (String field : userType.getFieldNames()) {
- if (!visitedColumns.contains(field)) {
-
- result.add(alter.dropColumn(field));
-
- }
- }
- }
-
- return result;
-
- }
-
-
- public static SchemaStatement dropUserType(CasserEntity entity) {
-
- if (entity.getType() != CasserEntityType.UDT) {
- throw new CasserMappingException("expected UDT entity " + entity);
- }
-
- return SchemaBuilder.dropType(entity.getName().toCql());
- }
-
- public static SchemaStatement createTable(CasserEntity entity) {
-
- if (entity.getType() != CasserEntityType.TABLE) {
- throw new CasserMappingException("expected table entity " + entity);
- }
-
- Create create = SchemaBuilder.createTable(entity.getName().toCql());
-
- create.ifNotExists();
-
- List clusteringColumns = new ArrayList();
-
- for (CasserProperty prop : entity.getOrderedProperties()) {
-
- ColumnType columnType = prop.getColumnType();
-
- if (columnType == ColumnType.CLUSTERING_COLUMN) {
- clusteringColumns.add(prop);
- }
-
- prop.getDataType().addColumn(create, prop.getColumnName());
-
- }
-
- if (!clusteringColumns.isEmpty()) {
- Options options = create.withOptions();
- clusteringColumns.forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
- }
-
- return create;
-
- }
-
- public static List alterTable(TableMetadata tmd,
- CasserEntity entity, boolean dropUnusedColumns) {
-
- if (entity.getType() != CasserEntityType.TABLE) {
- throw new CasserMappingException("expected table entity " + entity);
- }
-
- List result = new ArrayList();
-
- Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());
-
- final Set visitedColumns = dropUnusedColumns ? new HashSet()
- : Collections. emptySet();
-
- for (CasserProperty prop : entity.getOrderedProperties()) {
-
- String columnName = prop.getColumnName().getName();
-
- if (dropUnusedColumns) {
- visitedColumns.add(columnName);
- }
-
- ColumnType columnType = prop.getColumnType();
-
- if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
- continue;
- }
-
- ColumnMetadata columnMetadata = tmd.getColumn(columnName);
- SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));
-
- if (stmt != null) {
- result.add(stmt);
- }
-
- }
-
- if (dropUnusedColumns) {
- for (ColumnMetadata cm : tmd.getColumns()) {
- if (!visitedColumns.contains(cm.getName())) {
-
- result.add(alter.dropColumn(cm.getName()));
-
- }
- }
- }
-
- return result;
- }
-
- public static SchemaStatement dropTable(CasserEntity entity) {
-
- if (entity.getType() != CasserEntityType.TABLE) {
- throw new CasserMappingException("expected table entity " + entity);
- }
-
- return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
-
- }
-
- public static SchemaStatement createIndex(CasserProperty prop) {
-
- return SchemaBuilder.createIndex(prop.getIndexName().get().toCql())
- .ifNotExists()
- .onTable(prop.getEntity().getName().toCql())
- .andColumn(prop.getColumnName().toCql());
-
- }
-
- public static List createIndexes(CasserEntity entity) {
-
- return entity.getOrderedProperties().stream()
- .filter(p -> p.getIndexName().isPresent())
- .map(p -> SchemaUtil.createIndex(p))
- .collect(Collectors.toList());
-
- }
-
- public static List alterIndexes(TableMetadata tmd,
- CasserEntity entity, boolean dropUnusedIndexes) {
-
- List list = new ArrayList();
-
- final Set visitedColumns = dropUnusedIndexes ? new HashSet()
- : Collections. emptySet();
-
- entity
- .getOrderedProperties()
- .stream()
- .filter(p -> p.getIndexName().isPresent())
- .forEach(p -> {
-
- String columnName = p.getColumnName().getName();
-
- if (dropUnusedIndexes) {
- visitedColumns.add(columnName);
- }
-
- ColumnMetadata cm = tmd.getColumn(columnName);
-
- if (cm != null) {
- IndexMetadata im = cm.getIndex();
- if (im == null) {
- list.add(createIndex(p));
- }
- }
- else {
- list.add(createIndex(p));
- }
-
-
- });
-
- if (dropUnusedIndexes) {
-
- tmd
- .getColumns()
- .stream()
- .filter(c -> c.getIndex() != null && !visitedColumns.contains(c.getName()))
- .forEach(c -> {
-
- list.add(SchemaBuilder.dropIndex(c.getIndex().getName()).ifExists());
-
- });
-
-
- }
-
- return list;
-
- }
-
- public static SchemaStatement dropIndex(CasserProperty prop) {
- return SchemaBuilder.dropIndex(prop.getIndexName().get().toCql()).ifExists();
- }
-
- private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
- switch(o) {
- case ASC:
- return SchemaBuilder.Direction.ASC;
- case DESC:
- return SchemaBuilder.Direction.DESC;
- }
- throw new CasserMappingException("unknown ordering " + o);
- }
-
- public static void throwNoMapping(CasserProperty prop) {
-
- throw new CasserMappingException(
- "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '" + prop.getPropertyName()
- + "' type is '" + prop.getJavaType() + "' in the entity " + prop.getEntity());
-
- }
-
- private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
- if (columnMetadata != null) {
- return new OptionalColumnMetadata() {
-
- @Override
- public String getName() {
- return columnMetadata.getName();
- }
-
- @Override
- public DataType getType() {
- return columnMetadata.getType();
- }
-
- };
- }
- return null;
- }
-
- private static OptionalColumnMetadata optional(final String name, final DataType dataType) {
- if (dataType != null) {
- return new OptionalColumnMetadata() {
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public DataType getType() {
- return dataType;
- }
-
- };
- }
- return null;
- }
-
-}
diff --git a/src/main/java/com/noorq/casser/mapping/CasserMappingEntity.java b/src/main/java/com/noorq/casser/mapping/CasserMappingEntity.java
deleted file mode 100644
index 8ab9e38..0000000
--- a/src/main/java/com/noorq/casser/mapping/CasserMappingEntity.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
- *
- * Copyright (C) 2015 The Casser Authors
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.mapping;
-
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.noorq.casser.config.CasserSettings;
-import com.noorq.casser.core.Casser;
-import com.noorq.casser.mapping.annotation.Table;
-import com.noorq.casser.mapping.annotation.Tuple;
-import com.noorq.casser.mapping.annotation.UDT;
-import com.noorq.casser.support.CasserMappingException;
-
-public final class CasserMappingEntity implements CasserEntity {
-
- private final Class> iface;
- private final CasserEntityType type;
- private final IdentityName name;
- private final ImmutableMap props;
- private final ImmutableList orderedProps;
-
- public CasserMappingEntity(Class> iface) {
- this(iface, autoDetectType(iface));
- }
-
- public CasserMappingEntity(Class> iface, CasserEntityType type) {
-
- if (iface == null || !iface.isInterface()) {
- throw new IllegalArgumentException("invalid parameter " + iface);
- }
-
- this.iface = iface;
- this.type = Objects.requireNonNull(type, "type is empty");
- this.name = resolveName(iface, type);
-
- CasserSettings settings = Casser.settings();
-
- Method[] all = iface.getDeclaredMethods();
-
- List propsLocal = new ArrayList();
- ImmutableMap.Builder propsBuilder = ImmutableMap.builder();
-
- for (Method m : all) {
-
- if (settings.getGetterMethodDetector().apply(m)) {
-
- CasserProperty prop = new CasserMappingProperty(this, m);
-
- propsBuilder.put(prop.getPropertyName(), prop);
- propsLocal.add(prop);
-
- }
-
- }
-
- this.props = propsBuilder.build();
-
- Collections.sort(propsLocal, TypeAndOrdinalColumnComparator.INSTANCE);
- this.orderedProps = ImmutableList.copyOf(propsLocal);
-
- validateOrdinals();
- }
-
- @Override
- public CasserEntityType getType() {
- return type;
- }
-
- @Override
- public Class> getMappingInterface() {
- return iface;
- }
-
- @Override
- public Collection getOrderedProperties() {
- return orderedProps;
- }
-
- @Override
- public CasserProperty getProperty(String name) {
- return props.get(name);
- }
-
- @Override
- public IdentityName getName() {
- return name;
- }
-
- private static IdentityName resolveName(Class> iface, CasserEntityType type) {
-
- switch(type) {
-
- case TABLE:
- return MappingUtil.getTableName(iface, true);
-
- case TUPLE:
- return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
-
- case UDT:
- return MappingUtil.getUserDefinedTypeName(iface, true);
- }
-
- throw new CasserMappingException("invalid entity type " + type + " in " + type);
-
- }
-
- private static CasserEntityType autoDetectType(Class> iface) {
-
- Objects.requireNonNull(iface, "empty iface");
-
- if (null != iface.getDeclaredAnnotation(Table.class)) {
- return CasserEntityType.TABLE;
- }
-
- else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
- return CasserEntityType.TUPLE;
- }
-
- else if (null != iface.getDeclaredAnnotation(UDT.class)) {
- return CasserEntityType.UDT;
- }
-
- throw new CasserMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
- }
-
- private void validateOrdinals() {
-
- switch(getType()) {
-
- case TABLE:
- validateOrdinalsForTable();
- break;
-
- case TUPLE:
- validateOrdinalsInTuple();
- break;
-
- default:
- break;
- }
-
- }
-
- private void validateOrdinalsForTable() {
-
- BitSet partitionKeys = new BitSet();
- BitSet clusteringColumns = new BitSet();
-
- for (CasserProperty prop : getOrderedProperties()) {
-
- ColumnType type = prop.getColumnType();
-
- int ordinal = prop.getOrdinal();
-
- switch(type) {
-
- case PARTITION_KEY:
- if (partitionKeys.get(ordinal)) {
- throw new CasserMappingException("detected two or more partition key columns with the same ordinal " + ordinal + " in " + prop.getEntity());
- }
- partitionKeys.set(ordinal);
- break;
-
- case CLUSTERING_COLUMN:
- if (clusteringColumns.get(ordinal)) {
- throw new CasserMappingException("detected two or clustering columns with the same ordinal " + ordinal + " in " + prop.getEntity());
- }
- clusteringColumns.set(ordinal);
- break;
-
- default:
- break;
- }
-
- }
-
- }
-
- private void validateOrdinalsInTuple() {
- boolean[] ordinals = new boolean[props.size()];
-
- getOrderedProperties().forEach(p -> {
-
- int ordinal = p.getOrdinal();
-
- if (ordinal < 0 || ordinal >= ordinals.length) {
- throw new CasserMappingException("invalid ordinal " + ordinal + " found for property " + p.getPropertyName() + " in " + p.getEntity());
- }
-
- if (ordinals[ordinal]) {
- throw new CasserMappingException("detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity());
- }
-
- ordinals[ordinal] = true;
-
- });
-
- for (int i = 0; i != ordinals.length; ++i) {
- if (!ordinals[i]) {
- throw new CasserMappingException("detected absent ordinal " + i + " in " + this);
- }
- }
-
- }
-
- @Override
- public String toString() {
-
- StringBuilder str = new StringBuilder();
- str.append(iface.getSimpleName())
- .append("(").append(name.getName()).append(") ")
- .append(type.name().toLowerCase())
- .append(":\n");
-
- for (CasserProperty prop : getOrderedProperties()) {
- str.append(prop.toString());
- str.append("\n");
- }
- return str.toString();
- }
-
-}
diff --git a/src/main/java/com/noorq/casser/mapping/annotation/ClusteringColumn.java b/src/main/java/com/noorq/casser/mapping/annotation/ClusteringColumn.java
deleted file mode 100644
index dc497fe..0000000
--- a/src/main/java/com/noorq/casser/mapping/annotation/ClusteringColumn.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.mapping.annotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-import com.noorq.casser.mapping.OrderingDirection;
-
-/**
- * ClusteringColumn is the equivalent of the column name in the legacy Cassandra API.
- *
- * The purpose of this column is to add an additional dimension to the table.
- * @PartitionKey and @ClusteringColumn together form the primary key of the table.
- * The primary difference between them is that the partition key is used for routing,
- * to locate the data node in the cluster, while the clustering column is used
- * inside the node to locate the piece of data on that machine.
- *
- * A ClusteringColumn can be thought of as a key in a SortedMap that is stored entirely on a single node.
- * Developers must choose clustering column fields carefully, because all data
- * inside this SortedMap must fit on one node.
- *
- * A ClusteringColumn can have more than one part, and the order of the parts is important.
- * This order defines how Cassandra joins the parts and affects data retrieval
- * operations. Each part can have an ordering property that defines the default ascending or descending order
- * of the data. With two or more parts, select queries need to use a consistent
- * order of all parts as they are defined in the table.
- *
- * For example, first part is ASC ordering, second is also ASC, so Cassandra will sort entries like this:
- * a-a
- * a-b
- * b-a
- * b-b
- * In this case we are able run queries:
- * ORDER BY first ASC, second ASC
- * ORDER BY first DESC, second DESC
- * WHERE first=? ORDER BY second ASC
- * WHERE first=? ORDER BY second DESC
- * WHERE first=? AND second=?
- *
- * But, we can not run queries:
- * ORDER BY first DESC, second ASC
- * ORDER BY first ASC, second DESC
- * WHERE second=? ORDER BY first (ASC,DESC)
- *
- * @author Alex Shvid
- *
- */
-
-@Retention(value = RetentionPolicy.RUNTIME)
-@Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
-public @interface ClusteringColumn {
-
- /**
- * The default value is the name of the method, normalized to underscore form
- *
- * @return name of the column
- */
-
- String value() default "";
-
- /**
- * ClusteringColumn parts must be ordered in the @Table; this is a requirement of Cassandra.
- * Cassandra joins all parts into the final clustering key that is stored in the column family name.
- * Additionally, each part can have an ordering (ASC, DESC) that, together with the sequence of parts,
- * determines the key comparison function, so Cassandra always stores column family names in sorted order.
- *
- * By default the ordinal is 0, because in most cases a @Table has a single ClusteringColumn.
- * If the ClusteringColumn has two or more parts, use ordinal() to
- * define the sequence of the parts.
- *
- * @return number used to sort clustering columns
- */
-
- int ordinal() default 0;
-
- /**
- * Default order of values in the ClusteringColumn
- * This ordering is used when comparing clustering column values as Cassandra stores them in
- * sorted order.
- *
- * Default value is the ascending order
- *
- * @return ascending order or descending order of clustering column values
- */
-
- OrderingDirection ordering() default OrderingDirection.ASC;
-
- /**
- * Reserved words in Cassandra need to be quoted in CQL queries. This property marks that
- * the column name needs to be quoted.
- *
- * The default value is false; only selected names are quoted.
- *
- * @return true if the name has to be quoted
- */
-
- boolean forceQuote() default false;
-
-}
diff --git a/src/main/java/com/noorq/casser/mapping/annotation/Constraints.java b/src/main/java/com/noorq/casser/mapping/annotation/Constraints.java
deleted file mode 100644
index 408dbb0..0000000
--- a/src/main/java/com/noorq/casser/mapping/annotation/Constraints.java
+++ /dev/null
@@ -1,277 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.mapping.annotation;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-import javax.validation.Constraint;
-
-import com.noorq.casser.mapping.validator.AlphabetValidator;
-import com.noorq.casser.mapping.validator.EmailValidator;
-import com.noorq.casser.mapping.validator.LengthValidator;
-import com.noorq.casser.mapping.validator.LowerCaseValidator;
-import com.noorq.casser.mapping.validator.MaxLengthValidator;
-import com.noorq.casser.mapping.validator.MinLengthValidator;
-import com.noorq.casser.mapping.validator.NotEmptyValidator;
-import com.noorq.casser.mapping.validator.NotNullValidator;
-import com.noorq.casser.mapping.validator.NumberValidator;
-import com.noorq.casser.mapping.validator.PatternValidator;
-import com.noorq.casser.mapping.validator.UpperCaseValidator;
-
-/**
- * Constraint annotations are used for data integrity, mostly for @java.lang.String types.
- * They are placed on the particular method in the model interface.
- *
- * None of them has any effect on selects and data retrieval operations.
- *
- * Supported types:
- * - @NotNull supports any @java.lang.Object type
- * - All annotations support the @java.lang.String type
- *
- * @author Alex Shvid
- *
- */
-
-public final class Constraints {
-
- private Constraints() {
- }
-
- /**
- * The NotNull annotation is used to check that a value is not null before storing it.
- *
- * Applicable to any @java.lang.Object.
- *
- * It is not checked on selects and data retrieval operations.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = NotNullValidator.class)
- public @interface NotNull {
-
- }
-
- /**
- * The NotEmpty annotation is used to check that a value has text before storing it.
- *
- * It also checks for null and is a stricter annotation than @NotNull.
- *
- * Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
- *
- * It is not checked on selects and data retrieval operations.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = NotEmptyValidator.class)
- public @interface NotEmpty {
-
- }
-
- /**
- * The Email annotation is used to check that a value is a valid email address before storing it.
- *
- * Can be used only for @CharSequence.
- *
- * It is not checked on selects and data retrieval operations.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = EmailValidator.class)
- public @interface Email {
-
- }
-
- /**
- * The Number annotation is used to check that all characters in a value are digits before storing it.
- *
- * Can be used only for @java.lang.CharSequence.
- *
- * It is not checked on selects and data retrieval operations.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = NumberValidator.class)
- public @interface Number {
-
- }
-
-
- /**
- * The Alphabet annotation is used to check that all letters in a value belong to a specific alphabet before storing it.
- *
- * Can be used only for @java.lang.CharSequence.
- *
- * It is not checked on selects and data retrieval operations.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = AlphabetValidator.class)
- public @interface Alphabet {
-
- /**
- * Defines alphabet that will be used to check value
- *
- * @return alphabet characters in the string
- */
-
- String value();
-
- }
-
- /**
- * The Length annotation is used to ensure that a value has an exact length before storing it
- *
- * Can be used for @java.lang.CharSequence, @ByteBuffer and any array
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = LengthValidator.class)
- public @interface Length {
-
- int value();
-
- }
-
- /**
- * The MaxLength annotation is used to ensure that the length of a value does not exceed a threshold before storing it
- *
- * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = MaxLengthValidator.class)
- public @interface MaxLength {
-
- int value();
-
- }
-
- /**
- * The MinLength annotation is used to ensure that the length of a value is greater than or equal to a threshold before storing it
- *
- * Can be used for @java.lang.CharSequence, @ByteBuffer and byte[]
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = MinLengthValidator.class)
- public @interface MinLength {
-
- int value();
-
- }
-
- /**
- * The LowerCase annotation is used to ensure that a value is in lower case before storing it
- *
- * Can be used only for @java.lang.CharSequence
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = LowerCaseValidator.class)
- public @interface LowerCase {
-
- }
-
- /**
- * The UpperCase annotation is used to ensure that a value is in upper case before storing it
- *
- * Can be used only for @java.lang.CharSequence
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = UpperCaseValidator.class)
- public @interface UpperCase {
-
- }
-
- /**
- * The Pattern annotation is used to ensure that a value matches a user-defined regular expression before storing it
- *
- * Can be used only for @java.lang.CharSequence
- *
- * It has no effect on selects or other data retrieval operations
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- @Constraint(validatedBy = PatternValidator.class)
- public @interface Pattern {
-
- /**
- * User defined regex expression to check match of the value
- *
- * @return Java regex pattern
- */
-
- String value();
-
- /**
- * Regex flags composition
- *
- * @return Java regex flags
- */
-
- int flags();
-
- }
-
-}
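
For reference, a minimal sketch of how these constraint annotations are placed on a mapped model interface; the `User` interface and its getters are illustrative, not taken from this repository:

```
public interface User {

	@Constraints.NotNull
	@Constraints.Email
	String email();

	@Constraints.NotEmpty
	@Constraints.MaxLength(64)
	String displayName();

	@Constraints.Pattern(value = "[a-z0-9_]+", flags = 0)
	String handle();
}
```
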
diff --git a/src/main/java/com/noorq/casser/mapping/annotation/Types.java b/src/main/java/com/noorq/casser/mapping/annotation/Types.java
deleted file mode 100644
index 79808dc..0000000
--- a/src/main/java/com/noorq/casser/mapping/annotation/Types.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.mapping.annotation;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-import com.datastax.driver.core.DataType;
-
-/**
- * Types annotations are used to clarify which Cassandra data type maps to a particular Java type.
- *
- * A single Java type can correspond to multiple Cassandra data types:
- * - @String can be @DataType.Name.ASCII, @DataType.Name.TEXT or @DataType.Name.VARCHAR
- * - @Long can be @DataType.Name.BIGINT or @DataType.Name.COUNTER
- *
- * These annotations simplify the mapping between Java types and Cassandra data types.
- * They are not required; each Java type has a default Cassandra data type in Casser, but in some
- * cases you may want to control the mapping to make sure the right Cassandra data type is used.
- *
- * For complex types such as collections, UDT and Tuple types these annotations are used to
- * clarify the sub-type(s) or class/UDT names.
- *
- * They have a significant effect on schema operations.
- *
- * @author Alex Shvid
- *
- */
-
-public final class Types {
-
- private Types() {
- }
-
- /**
- * Says to use @DataType.Name.ASCII data type in schema
- * Java type is @String
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Ascii {
-
- }
-
- /**
- * Says to use @DataType.Name.BIGINT data type in schema
- * Java type is @Long
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Bigint {
-
- }
-
- /**
- * Says to use @DataType.Name.BLOB data type in schema
- * Java type is @ByteBuffer or @byte[]
- * Using by default
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Blob {
-
- }
-
- /**
- * Says to use @DataType.Name.LIST data type in schema with specific sub-type
- * Java type is @List
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: prepend, prependAll, setIdx, append, appendAll, discard and discardAll in @UpdateOperation
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface List {
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT sub-type in the list, consider @UDTList annotation
- *
- * @return data type name of the value
- */
-
- DataType.Name value();
-
- }
-
- /**
- * Says to use @DataType.Name.MAP data type in schema with specific sub-types
- * Java type is @Map
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: put and putAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Map {
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT key sub-type in the map, consider @UDTKeyMap or @UDTMap annotations
- *
- * @return data type name of the key
- */
-
- DataType.Name key();
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT value sub-type in the map, consider @UDTValueMap or @UDTMap annotations
- *
- * @return data type name of the value
- */
-
- DataType.Name value();
-
- }
-
- /**
- * Says to use @DataType.Name.COUNTER type in schema
- * Java type is @Long
- *
- * For this type there are special operations: increment and decrement in @UpdateOperation.
- * You do not need to initialize counter value, it will be done automatically by Cassandra.
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Counter {
-
- }
-
- /**
- * Says to use @DataType.Name.SET data type in schema with specific sub-type
- * Java type is @Set
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: add, addAll, remove and removeAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Set {
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT sub-type in the set, consider @UDTSet annotation
- *
- * @return data type name of the value
- */
-
- DataType.Name value();
-
- }
-
- /**
- * Says to use @DataType.Name.CUSTOM type in schema
- * Java type is @ByteBuffer or @byte[]
- *
- * Used for custom user types that have a special implementation.
- * Casser does not deal with this class directly for now; it is used only in serialized form.
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Custom {
-
- /**
- * Class name of the custom user type that is implementation of the type
- *
- * @return class name of the custom type implementation
- */
-
- String className();
-
- }
-
- /**
- * Says to use @DataType.Name.TEXT type in schema
- * Java type is @String
- * Using by default
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Text {
-
- }
-
- /**
- * Says to use @DataType.Name.TIMESTAMP type in schema
- * Java type is @Date
- * Using by default
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Timestamp {
-
- }
-
- /**
- * Says to use @DataType.Name.TIMEUUID type in schema
- * Java type is @UUID or @Date
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Timeuuid {
-
- }
-
- /**
- * Says to use @DataType.Name.TUPLE type in schema
- * Java type is @TupleValue or model interface with @Tuple annotation
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Tuple {
-
- /**
- * If the Java type is @TupleValue then this field is required.
- * Any Cassandra tuple is a sequence of Cassandra types.
- * For now Casser supports only simple data types in tuples for the @TupleValue Java type.
- *
- * If the Java type is a model interface with the @Tuple annotation then
- * all methods in that interface can carry Types annotations, including complex types.
- *
- * @return data type name sequence
- */
-
- DataType.Name[] value() default {};
-
- }
-
- /**
- * Says to use @DataType.Name.UDT type in schema
- * Java type is @UDTValue or model interface with @UDT annotation
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDT {
-
- /**
- * If the Java type is @UDTValue then this field is required.
- * Every Cassandra UDT has a name and must be created before it can be used as a Cassandra type.
- *
- * This value is the name of a UDT that has already been created in the schema.
- *
- * If the Java type is a model interface with the @UDT annotation then
- * this field is not used, since the model interface defines the UserDefinedType with a specific name.
- *
- * @return UDT name
- */
-
- String value() default "";
-
- /**
- * Only used when the Java type is @UDTValue.
- *
- * If the value() method returns a reserved word that cannot be used as a UDT name,
- * forceQuote will add quotes around this name in all CQL queries.
- *
- * The default value is false.
- *
- * @return true if quotation is needed
- */
-
- boolean forceQuote() default false;
-
- }
-
- /**
- * Says to use @DataType.Name.MAP data type in schema with specific UDT sub-type as a key and simple sub-type as a value
- * Java type is @Map
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: put and putAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDTKeyMap {
-
- /**
- * Clarification of using the UDT data type as a key sub-type in the collection.
- *
- * @return annotation of UDT type
- */
-
- UDT key();
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT value sub-type in the map, consider @UDTMap annotations
- *
- * @return data type name of the value
- */
-
- DataType.Name value();
-
- }
-
- /**
- * Says to use @DataType.Name.LIST data type in schema with specific UDT sub-type
- * Java type is @List
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: prepend, prependAll, setIdx, append, appendAll, discard and discardAll in @UpdateOperation
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDTList {
-
- /**
- * Clarification of using the UDT data type as a sub-type in the collection.
- *
- * @return annotation of the UDT value
- */
-
- UDT value();
-
- }
-
- /**
- * Says to use @DataType.Name.MAP data type in schema with specific UDT sub-types
- * Java type is @Map
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: put and putAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDTMap {
-
- /**
- * Clarification of using the UDT data type as a key sub-type in the collection.
- *
- * @return annotation of the UDT key
- */
-
- UDT key();
-
- /**
- * Clarification of using the UDT data type as a value sub-type in the collection.
- *
- * @return annotation of the UDT value
- */
-
- UDT value();
-
- }
-
- /**
- * Says to use @DataType.Name.SET data type in schema with specific UDT sub-type
- * Java type is @Set
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: add, addAll, remove and removeAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDTSet {
-
- /**
- * Clarification of using the UDT data type as a sub-type in the collection.
- *
- * @return annotation of the UDT value
- */
-
- UDT value();
-
- }
-
- /**
- * Says to use @DataType.Name.MAP data type in schema with specific simple sub-type as a key and UDT sub-type as a value
- * Java type is @Map
- *
- * Casser does not allow using a specific collection implementation, so the result of a data retrieval
- * operation can be a collection with a different implementation.
- *
- * This annotation is usually used only to clarify sub-types, and only when the sub-type is a Java type that
- * corresponds to multiple Cassandra data types.
- *
- * For this type there are special operations: put and putAll in @UpdateOperation.
- *
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface UDTValueMap {
-
- /**
- * Clarification of using the sub-type data type in the collection.
- * It supports only simple data type (not Collection, UDT or Tuple)
- *
- * In case if you need UDT key sub-type in the map, consider @UDTMap annotations
- *
- * @return data type name of the key
- */
-
- DataType.Name key();
-
- /**
- * Clarification of using the UDT data type as a value sub-type in the collection.
- *
- * @return annotation of the UDT value
- */
-
- UDT value();
-
- }
-
- /**
- * Says to use @DataType.Name.UUID type in schema
- * Java type is @UUID
- * Using by default
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Uuid {
-
- }
-
- /**
- * Says to use @DataType.Name.VARCHAR type in schema
- * Java type is @String
- */
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(value = { ElementType.METHOD, ElementType.ANNOTATION_TYPE })
- public @interface Varchar {
-
- }
-
-}
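
For reference, a brief sketch of how these type annotations clarify the Cassandra type of a getter; the `Message` interface and its fields are made up for illustration:

```
public interface Message {

	@Types.Timeuuid
	UUID id();

	@Types.Ascii
	String category();

	@Types.Set(DataType.Name.TEXT)
	Set<String> tags();

	@Types.UDTMap(key = @Types.UDT("author"), value = @Types.UDT("address"))
	Map<UDTValue, UDTValue> contacts();
}
```
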
diff --git a/src/main/java/com/noorq/casser/support/Scala.java b/src/main/java/com/noorq/casser/support/Scala.java
deleted file mode 100644
index 062ff27..0000000
--- a/src/main/java/com/noorq/casser/support/Scala.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2015 The Casser Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.noorq.casser.support;
-
-import scala.concurrent.Future;
-import scala.concurrent.impl.Promise.DefaultPromise;
-
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-
-public final class Scala {
-
-	public static <T> Future<T> asFuture(ListenableFuture<T> future) {
-		final scala.concurrent.Promise<T> promise = new DefaultPromise<T>();
-		Futures.addCallback(future, new FutureCallback<T>() {
-			@Override public void onSuccess(T result) {
-				promise.success(result);
-			}
-			@Override public void onFailure(Throwable t) {
-				promise.failure(t);
-			}
-		});
-		return promise.future();
-	}
-
-	public static <T, A> Future<Fun.Tuple2<T, A>> asFuture(ListenableFuture<T> future, A a) {
-		final scala.concurrent.Promise<Fun.Tuple2<T, A>> promise = new DefaultPromise<Fun.Tuple2<T, A>>();
-		Futures.addCallback(future, new FutureCallback<T>() {
-			@Override public void onSuccess(T result) {
-				promise.success(new Fun.Tuple2<T, A>(result, a));
-			}
-			@Override public void onFailure(Throwable t) {
-				promise.failure(t);
-			}
-		});
-		return promise.future();
-	}
-
-	public static <T, A, B> Future<Fun.Tuple3<T, A, B>> asFuture(ListenableFuture<T> future, A a, B b) {
-		final scala.concurrent.Promise<Fun.Tuple3<T, A, B>> promise = new DefaultPromise<Fun.Tuple3<T, A, B>>();
-		Futures.addCallback(future, new FutureCallback<T>() {
-			@Override public void onSuccess(T result) {
-				promise.success(new Fun.Tuple3<T, A, B>(result, a, b));
-			}
-			@Override public void onFailure(Throwable t) {
-				promise.failure(t);
-			}
-		});
-		return promise.future();
-	}
-
-	public static <T, A, B, C> Future<Fun.Tuple4<T, A, B, C>> asFuture(ListenableFuture<T> future, A a, B b, C c) {
-		final scala.concurrent.Promise<Fun.Tuple4<T, A, B, C>> promise = new DefaultPromise<Fun.Tuple4<T, A, B, C>>();
-		Futures.addCallback(future, new FutureCallback<T>() {
-			@Override public void onSuccess(T result) {
-				promise.success(new Fun.Tuple4<T, A, B, C>(result, a, b, c));
-			}
-			@Override public void onFailure(Throwable t) {
-				promise.failure(t);
-			}
-		});
-		return promise.future();
-	}
-
-	public static <T, A, B, C, D> Future<Fun.Tuple5<T, A, B, C, D>> asFuture(ListenableFuture<T> future, A a, B b, C c, D d) {
-		final scala.concurrent.Promise<Fun.Tuple5<T, A, B, C, D>> promise = new DefaultPromise<Fun.Tuple5<T, A, B, C, D>>();
-		Futures.addCallback(future, new FutureCallback<T>() {
-			@Override public void onSuccess(T result) {
-				promise.success(new Fun.Tuple5<T, A, B, C, D>(result, a, b, c, d));
-			}
-			@Override public void onFailure(Throwable t) {
-				promise.failure(t);
-			}
-		});
-		return promise.future();
-	}
-
-}
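
The removed helper bridged the driver's Guava futures into Scala futures. A hedged usage sketch; the `session`, `statement` and `userId` variables are assumptions for illustration:

```
// ResultSetFuture implements ListenableFuture<ResultSet>, so it can be adapted directly.
ResultSetFuture rsf = session.executeAsync(statement);
Future<ResultSet> rows = Scala.asFuture(rsf);

// Extra context can be carried through the callback as a tuple alongside the result.
Future<Fun.Tuple2<ResultSet, UUID>> rowsWithId = Scala.asFuture(rsf, userId);
```
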
diff --git a/src/main/java/com/noorq/casser/config/DefaultCasserSettings.java b/src/main/java/net/helenus/config/DefaultHelenusSettings.java
similarity index 69%
rename from src/main/java/com/noorq/casser/config/DefaultCasserSettings.java
rename to src/main/java/net/helenus/config/DefaultHelenusSettings.java
index 145934d..cacc892 100644
--- a/src/main/java/com/noorq/casser/config/DefaultCasserSettings.java
+++ b/src/main/java/net/helenus/config/DefaultHelenusSettings.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,18 +13,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.config;
+package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
-import com.noorq.casser.core.DslInstantiator;
-import com.noorq.casser.core.MapperInstantiator;
-import com.noorq.casser.core.reflect.ReflectionDslInstantiator;
-import com.noorq.casser.core.reflect.ReflectionMapperInstantiator;
-import com.noorq.casser.mapping.convert.CamelCaseToUnderscoreConverter;
+import net.helenus.core.DslInstantiator;
+import net.helenus.core.MapperInstantiator;
+import net.helenus.core.reflect.ReflectionDslInstantiator;
+import net.helenus.core.reflect.ReflectionMapperInstantiator;
+import net.helenus.mapping.convert.CamelCaseToUnderscoreConverter;
-public class DefaultCasserSettings implements CasserSettings {
+public class DefaultHelenusSettings implements HelenusSettings {
@Override
	public Function<String, String> getPropertyToColumnConverter() {
diff --git a/src/main/java/com/noorq/casser/config/GetterMethodDetector.java b/src/main/java/net/helenus/config/GetterMethodDetector.java
similarity index 75%
rename from src/main/java/com/noorq/casser/config/GetterMethodDetector.java
rename to src/main/java/net/helenus/config/GetterMethodDetector.java
index 866fb27..72c0f9f 100644
--- a/src/main/java/com/noorq/casser/config/GetterMethodDetector.java
+++ b/src/main/java/net/helenus/config/GetterMethodDetector.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,7 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.config;
+package net.helenus.config;
+
+import net.helenus.mapping.annotation.Transient;
import java.lang.reflect.Method;
import java.util.function.Function;
@@ -21,20 +23,25 @@ import java.util.function.Function;
public enum GetterMethodDetector implements Function<Method, Boolean> {
INSTANCE;
-
+
@Override
public Boolean apply(Method method) {
-
+
if (method == null) {
throw new IllegalArgumentException("empty parameter");
}
-
+
if (method.getParameterCount() != 0 || method.getReturnType() == void.class) {
return false;
}
-
+
+ // Methods marked "Transient" are not mapped, skip them.
+ if (method.getDeclaredAnnotation(Transient.class) != null) {
+ return false;
+ }
+
return true;
-
+
}
}
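
With the new check, any parameterless, non-void method annotated @Transient is excluded from mapping. A minimal sketch, with invented interface and method names:

```
public interface Account {

	UUID id();                   // mapped: no-arg getter with a return type

	@Transient
	String cachedDisplayName();  // skipped: marked @Transient

	void touch();                // skipped: void return type, not a getter
}
```
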
diff --git a/src/main/java/com/noorq/casser/config/CasserSettings.java b/src/main/java/net/helenus/config/HelenusSettings.java
similarity index 77%
rename from src/main/java/com/noorq/casser/config/CasserSettings.java
rename to src/main/java/net/helenus/config/HelenusSettings.java
index bd25c59..c5fc17b 100644
--- a/src/main/java/com/noorq/casser/config/CasserSettings.java
+++ b/src/main/java/net/helenus/config/HelenusSettings.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,22 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.config;
+package net.helenus.config;
import java.lang.reflect.Method;
import java.util.function.Function;
-import com.noorq.casser.core.DslInstantiator;
-import com.noorq.casser.core.MapperInstantiator;
+import net.helenus.core.DslInstantiator;
+import net.helenus.core.MapperInstantiator;
-public interface CasserSettings {
+public interface HelenusSettings {
	Function<String, String> getPropertyToColumnConverter();
-
+
	Function<Method, Boolean> getGetterMethodDetector();
DslInstantiator getDslInstantiator();
-
+
MapperInstantiator getMapperInstantiator();
}
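
Because the settings are an interface, individual strategies can be swapped in before any DSL objects are built. A hypothetical sketch; the lower-casing converter is invented for illustration:

```
// Install custom settings before building DSL objects; see Helenus.settings(...) later in this diff.
HelenusSettings custom = new DefaultHelenusSettings() {
	@Override
	public Function<String, String> getPropertyToColumnConverter() {
		return name -> name.toLowerCase();  // illustrative naming strategy
	}
};
Helenus.settings(custom);
```
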
diff --git a/src/main/java/com/noorq/casser/core/AbstractSessionOperations.java b/src/main/java/net/helenus/core/AbstractSessionOperations.java
similarity index 68%
rename from src/main/java/com/noorq/casser/core/AbstractSessionOperations.java
rename to src/main/java/net/helenus/core/AbstractSessionOperations.java
index 5b2d9e1..36a5145 100644
--- a/src/main/java/com/noorq/casser/core/AbstractSessionOperations.java
+++ b/src/main/java/net/helenus/core/AbstractSessionOperations.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,141 +13,136 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
import java.io.PrintStream;
import java.util.concurrent.Executor;
+import com.datastax.driver.core.schemabuilder.SchemaStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.datastax.driver.core.PreparedStatement;
-import com.datastax.driver.core.RegularStatement;
-import com.datastax.driver.core.ResultSet;
-import com.datastax.driver.core.ResultSetFuture;
-import com.datastax.driver.core.Session;
-import com.datastax.driver.core.Statement;
+import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture;
-import com.noorq.casser.mapping.value.ColumnValuePreparer;
-import com.noorq.casser.mapping.value.ColumnValueProvider;
-import com.noorq.casser.support.CasserException;
+
+import net.helenus.mapping.value.ColumnValuePreparer;
+import net.helenus.mapping.value.ColumnValueProvider;
+import net.helenus.support.HelenusException;
+
+import javax.xml.validation.Schema;
public abstract class AbstractSessionOperations {
final Logger logger = LoggerFactory.getLogger(getClass());
-
+
abstract public Session currentSession();
-
+
abstract public String usingKeyspace();
-
+
abstract public boolean isShowCql();
-
+
abstract public PrintStream getPrintStream();
-
+
abstract public Executor getExecutor();
-
+
abstract public SessionRepository getSessionRepository();
-
+
abstract public ColumnValueProvider getValueProvider();
-
+
abstract public ColumnValuePreparer getValuePreparer();
public PreparedStatement prepare(RegularStatement statement) {
-
+
try {
-
+
log(statement, false);
-
+
return currentSession().prepare(statement);
-
- }
- catch(RuntimeException e) {
+
+ } catch (RuntimeException e) {
throw translateException(e);
}
-
+
}
-
+
	public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
-
+
try {
-
+
log(statement, false);
-
+
return currentSession().prepareAsync(statement);
-
- }
- catch(RuntimeException e) {
+
+ } catch (RuntimeException e) {
throw translateException(e);
}
-
+
}
-
+
public ResultSet execute(Statement statement, boolean showValues) {
-
+
return executeAsync(statement, showValues).getUninterruptibly();
-
+
}
-
+
public ResultSetFuture executeAsync(Statement statement, boolean showValues) {
-
+
try {
-
+
log(statement, showValues);
-
+
return currentSession().executeAsync(statement);
-
- }
- catch(RuntimeException e) {
+
+ } catch (RuntimeException e) {
throw translateException(e);
}
-
+
}
void log(Statement statement, boolean showValues) {
-
+
if (logger.isInfoEnabled()) {
logger.info("Execute statement " + statement);
}
-
- if (isShowCql()) {
-
- if (statement instanceof BuiltStatement) {
-
- BuiltStatement builtStatement = (BuiltStatement) statement;
- if (showValues) {
- RegularStatement regularStatement = builtStatement.setForceNoValues(true);
- printCql(regularStatement.getQueryString());
- }
- else {
- printCql(builtStatement.getQueryString());
- }
-
- }
- else if (statement instanceof RegularStatement) {
+ if (isShowCql()) {
+
+ if (statement instanceof BuiltStatement) {
+
+ BuiltStatement builtStatement = (BuiltStatement) statement;
+
+ if (showValues) {
+ RegularStatement regularStatement = builtStatement.setForceNoValues(true);
+ printCql(regularStatement.getQueryString());
+ } else {
+ printCql(builtStatement.getQueryString());
+ }
+
+ } else if (statement instanceof RegularStatement) {
RegularStatement regularStatement = (RegularStatement) statement;
printCql(regularStatement.getQueryString());
- }
- else {
+ } else {
printCql(statement.toString());
}
-
}
}
-
+
+ public void cache(String key, Object value) {
+ }
+
RuntimeException translateException(RuntimeException e) {
-
- if (e instanceof CasserException) {
+
+ if (e instanceof HelenusException) {
return e;
}
-
- throw new CasserException(e);
+
+ throw new HelenusException(e);
}
-
+
void printCql(String cql) {
getPrintStream().println(cql);
}
-
+
}
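
The execute/executeAsync pair above differs only in whether the caller blocks; both funnel through the same logging and exception translation. A small usage sketch; the `sessionOps`, `stmt` and `executor` variables are assumptions:

```
// Synchronous: blocks until the rows are available.
ResultSet rs = sessionOps.execute(stmt, false /* showValues */);

// Asynchronous: returns immediately, completion is observed on the given executor.
ResultSetFuture fut = sessionOps.executeAsync(stmt, false);
fut.addListener(() -> System.out.println("statement completed"), executor);
```
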
diff --git a/src/main/java/com/noorq/casser/core/AutoDdl.java b/src/main/java/net/helenus/core/AutoDdl.java
similarity index 83%
rename from src/main/java/com/noorq/casser/core/AutoDdl.java
rename to src/main/java/net/helenus/core/AutoDdl.java
index 66af56c..d578cfd 100644
--- a/src/main/java/com/noorq/casser/core/AutoDdl.java
+++ b/src/main/java/net/helenus/core/AutoDdl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,11 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
public enum AutoDdl {
- VALIDATE,
- UPDATE,
- CREATE,
- CREATE_DROP;
-}
\ No newline at end of file
+ VALIDATE, UPDATE, CREATE, CREATE_DROP;
+}
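
AutoDdl selects how schema DDL is applied when a session is initialized, following the usual auto-DDL convention: VALIDATE checks the existing schema, UPDATE alters it, CREATE creates missing objects, and CREATE_DROP additionally drops them when the session closes. How the enum is wired into the session builder is not shown in this hunk, so the call below is an assumption for illustration only:

```
// Hypothetical wiring; the actual SessionInitializer method may be named differently.
SessionInitializer init = Helenus.init(session);
init.autoDdl(AutoDdl.CREATE_DROP);
```
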
diff --git a/src/main/java/net/helenus/core/ConflictingUnitOfWorkException.java b/src/main/java/net/helenus/core/ConflictingUnitOfWorkException.java
new file mode 100644
index 0000000..703cb58
--- /dev/null
+++ b/src/main/java/net/helenus/core/ConflictingUnitOfWorkException.java
@@ -0,0 +1,11 @@
+package net.helenus.core;
+
+public class ConflictingUnitOfWorkException extends Exception {
+
+ final UnitOfWork uow;
+
+ ConflictingUnitOfWorkException(UnitOfWork uow) {
+ this.uow = uow;
+ }
+
+}
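
This checked exception pairs with the unit-of-work methods added to HelenusSession further down in this diff (begin, commit, abort). A minimal sketch of the intended flow, assuming `session` is a HelenusSession:

```
UnitOfWork uow = session.begin();   // nested begin() calls stack inside the current unit of work
try {
	// ... reads and writes that should be treated as one logical unit ...
	session.commit();
} catch (ConflictingUnitOfWorkException e) {
	session.abort();                // give up on conflict; the unit of work is discarded
}
```
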
diff --git a/src/main/java/com/noorq/casser/core/DslInstantiator.java b/src/main/java/net/helenus/core/DslInstantiator.java
similarity index 69%
rename from src/main/java/com/noorq/casser/core/DslInstantiator.java
rename to src/main/java/net/helenus/core/DslInstantiator.java
index 970ea35..7a61aea 100644
--- a/src/main/java/com/noorq/casser/core/DslInstantiator.java
+++ b/src/main/java/net/helenus/core/DslInstantiator.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,14 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
import java.util.Optional;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
+import com.datastax.driver.core.Metadata;
+import net.helenus.core.reflect.HelenusPropertyNode;
public interface DslInstantiator {
-	<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<CasserPropertyNode> parent);
-
+	<E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
+
}
diff --git a/src/main/java/com/noorq/casser/core/Filter.java b/src/main/java/net/helenus/core/Filter.java
similarity index 80%
rename from src/main/java/com/noorq/casser/core/Filter.java
rename to src/main/java/net/helenus/core/Filter.java
index 83d2954..c09200c 100644
--- a/src/main/java/com/noorq/casser/core/Filter.java
+++ b/src/main/java/net/helenus/core/Filter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,33 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
import java.util.Objects;
import com.datastax.driver.core.querybuilder.Clause;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
-import com.noorq.casser.mapping.MappingUtil;
-import com.noorq.casser.mapping.value.ColumnValuePreparer;
+
+import net.helenus.core.reflect.HelenusPropertyNode;
+import net.helenus.mapping.MappingUtil;
+import net.helenus.mapping.value.ColumnValuePreparer;
public final class Filter<V> {

-	private final CasserPropertyNode node;
+	private final HelenusPropertyNode node;
	private final Postulate<V> postulate;
-
-	private Filter(CasserPropertyNode node, Postulate<V> postulate) {
+
+	private Filter(HelenusPropertyNode node, Postulate<V> postulate) {
this.node = node;
this.postulate = postulate;
}
-
- public CasserPropertyNode getNode() {
+
+ public HelenusPropertyNode getNode() {
return node;
}
public Clause getClause(ColumnValuePreparer valuePreparer) {
return postulate.getClause(node, valuePreparer);
}
-
+
	public static <V> Filter<V> equal(Getter<V> getter, V val) {
return create(getter, Operator.EQ, val);
}
@@ -47,26 +48,26 @@ public final class Filter {
	public static <V> Filter<V> in(Getter<V> getter, V... vals) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(vals, "empty values");
-
+
if (vals.length == 0) {
throw new IllegalArgumentException("values array is empty");
}
-
+
for (int i = 0; i != vals.length; ++i) {
Objects.requireNonNull(vals[i], "value[" + i + "] is empty");
}
-
- CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
-
+
+ HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
+
		Postulate<V> postulate = Postulate.of(Operator.IN, vals);
-
+
return new Filter(node, postulate);
}
-
+
	public static <V> Filter<V> greaterThan(Getter<V> getter, V val) {
return create(getter, Operator.GT, val);
}
-
+
	public static <V> Filter<V> lessThan(Getter<V> getter, V val) {
return create(getter, Operator.LT, val);
}
@@ -83,24 +84,24 @@ public final class Filter {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(postulate, "empty operator");
- CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
-
+ HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
+
return new Filter(node, postulate);
}
-
+
	public static <V> Filter<V> create(Getter<V> getter, Operator op, V val) {
Objects.requireNonNull(getter, "empty getter");
Objects.requireNonNull(op, "empty op");
Objects.requireNonNull(val, "empty value");
-
+
if (op == Operator.IN) {
throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
}
-
- CasserPropertyNode node = MappingUtil.resolveMappingProperty(getter);
-
+
+ HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
+
		Postulate<V> postulate = Postulate.of(op, val);
-
+
return new Filter(node, postulate);
}
@@ -108,7 +109,5 @@ public final class Filter {
public String toString() {
return node.getColumnName() + postulate.toString();
}
-
-
-
+
}
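
Filters are built from DSL getters via method references. A short sketch; `account` stands for a DSL instance obtained from Helenus.dsl(Account.class), and the field names are invented:

```
Filter<String>  byEmail = Filter.equal(account::email, "user@example.com");
Filter<Integer> active  = Filter.greaterThan(account::loginCount, 10);
Filter<String>  region  = Filter.in(account::country, "US", "CA");
```
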
diff --git a/src/main/java/com/noorq/casser/core/Getter.java b/src/main/java/net/helenus/core/Getter.java
similarity index 88%
rename from src/main/java/com/noorq/casser/core/Getter.java
rename to src/main/java/net/helenus/core/Getter.java
index fda1480..cad7e03 100644
--- a/src/main/java/com/noorq/casser/core/Getter.java
+++ b/src/main/java/net/helenus/core/Getter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,10 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
public interface Getter {
V get();
-
+
}
diff --git a/src/main/java/net/helenus/core/Helenus.java b/src/main/java/net/helenus/core/Helenus.java
new file mode 100644
index 0000000..03f3028
--- /dev/null
+++ b/src/main/java/net/helenus/core/Helenus.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2015 The Helenus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.helenus.core;
+
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Metadata;
+import com.datastax.driver.core.Session;
+
+import net.helenus.config.DefaultHelenusSettings;
+import net.helenus.config.HelenusSettings;
+import net.helenus.core.reflect.DslExportable;
+import net.helenus.core.reflect.HelenusPropertyNode;
+import net.helenus.mapping.HelenusEntity;
+import net.helenus.support.HelenusMappingException;
+
+public final class Helenus {
+
+ private static volatile HelenusSettings settings = new DefaultHelenusSettings();
+	private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
+	private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
+	private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
+ private static volatile HelenusSession singleton;
+
+
+ private Helenus() {
+ }
+
+ protected static void setSession(HelenusSession session) {
+ sessions.add(session);
+ singleton = session;
+ }
+
+ public static HelenusSession session() {
+ return singleton;
+ }
+
+ public static void shutdown() {
+ sessions.forEach((session) -> {
+ session.close();
+ sessions.remove(session);
+ });
+ dslCache.clear();
+ }
+
+ public static HelenusSettings settings() {
+ return settings;
+ }
+
+ public static HelenusSettings settings(HelenusSettings overrideSettings) {
+ HelenusSettings old = settings;
+ settings = overrideSettings;
+ return old;
+ }
+
+ public static SessionInitializer connect(Cluster cluster) {
+ Session session = cluster.connect();
+ return new SessionInitializer(session);
+ }
+
+ public static SessionInitializer connect(Cluster cluster, String keyspace) {
+ Session session = cluster.connect(keyspace);
+ return new SessionInitializer(session);
+ }
+
+ public static SessionInitializer init(Session session) {
+
+ if (session == null) {
+ throw new IllegalArgumentException("empty session");
+ }
+
+ return new SessionInitializer(session);
+ }
+
+ public static void clearDslCache() {
+ dslCache.clear();
+ }
+
+	public static <E> E dsl(Class<E> iface) {
+ return dsl(iface, null);
+ }
+
+	public static <E> E dsl(Class<E> iface, Metadata metadata) {
+ return dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
+ }
+
+	public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Metadata metadata) {
+ return dsl(iface, classLoader, Optional.empty(), metadata);
+ }
+
+	public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
+			Metadata metadata) {
+
+ Object instance = null;
+
+ if (!parent.isPresent()) {
+ instance = dslCache.get(iface);
+ }
+
+ if (instance == null) {
+
+ instance = settings.getDslInstantiator().instantiate(iface, classLoader, parent, metadata);
+
+ if (!parent.isPresent()) {
+
+ Object c = dslCache.putIfAbsent(iface, instance);
+ if (c != null) {
+ instance = c;
+ }
+
+ }
+ }
+
+ return (E) instance;
+ }
+
+	public static <E> E map(Class<E> iface, Map<String, Object> src) {
+ return map(iface, src, iface.getClassLoader());
+ }
+
+	public static <E> E map(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
+ return settings.getMapperInstantiator().instantiate(iface, src, classLoader);
+ }
+
+	public static HelenusEntity entity(Class<?> iface) {
+ return entity(iface, metadataForEntity.get(iface));
+ }
+
+	public static HelenusEntity entity(Class<?> iface, Metadata metadata) {
+
+ Object dsl = dsl(iface, metadata);
+
+ DslExportable e = (DslExportable) dsl;
+
+ return e.getHelenusMappingEntity();
+ }
+
+ public static HelenusEntity resolve(Object ifaceOrDsl) {
+ return resolve(ifaceOrDsl, metadataForEntity.get(ifaceOrDsl));
+ }
+
+ public static HelenusEntity resolve(Object ifaceOrDsl, Metadata metadata) {
+
+ if (ifaceOrDsl == null) {
+ throw new HelenusMappingException("ifaceOrDsl is null");
+ }
+
+ if (ifaceOrDsl instanceof DslExportable) {
+
+ DslExportable e = (DslExportable) ifaceOrDsl;
+
+ return e.getHelenusMappingEntity();
+ }
+
+ if (ifaceOrDsl instanceof Class) {
+
+			Class<?> iface = (Class<?>) ifaceOrDsl;
+
+ if (!iface.isInterface()) {
+ throw new HelenusMappingException("class is not an interface " + iface);
+ }
+
+ metadataForEntity.putIfAbsent(iface, metadata);
+ return entity(iface, metadata);
+
+ }
+
+ throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
+ }
+
+}
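
A hedged bootstrap sketch using the new entry points; the contact point, keyspace and `Account` interface are placeholders:

```
Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();

// connect() wraps Cluster.connect() and hands back a SessionInitializer.
SessionInitializer init = Helenus.connect(cluster, "my_keyspace");

// Mapping metadata can also be resolved straight from an annotated interface.
HelenusEntity entity = Helenus.entity(Account.class, cluster.getMetadata());
```
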
diff --git a/src/main/java/com/noorq/casser/core/CasserSession.java b/src/main/java/net/helenus/core/HelenusSession.java
similarity index 52%
rename from src/main/java/com/noorq/casser/core/CasserSession.java
rename to src/main/java/net/helenus/core/HelenusSession.java
index 87d20e3..15cff2b 100644
--- a/src/main/java/com/noorq/casser/core/CasserSession.java
+++ b/src/main/java/net/helenus/core/HelenusSession.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Casser Authors
+ * Copyright (C) 2015 The Helenus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,84 +13,87 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.noorq.casser.core;
+package net.helenus.core;
import java.io.Closeable;
import java.io.PrintStream;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Executor;
+import java.util.concurrent.TimeUnit;
import java.util.function.Function;
-import com.datastax.driver.core.CloseFuture;
-import com.datastax.driver.core.Row;
-import com.datastax.driver.core.Session;
-import com.noorq.casser.core.operation.CountOperation;
-import com.noorq.casser.core.operation.DeleteOperation;
-import com.noorq.casser.core.operation.InsertOperation;
-import com.noorq.casser.core.operation.SelectOperation;
-import com.noorq.casser.core.operation.UpdateOperation;
-import com.noorq.casser.core.reflect.CasserPropertyNode;
-import com.noorq.casser.mapping.CasserEntity;
-import com.noorq.casser.mapping.MappingUtil;
-import com.noorq.casser.mapping.value.ColumnValuePreparer;
-import com.noorq.casser.mapping.value.ColumnValueProvider;
-import com.noorq.casser.mapping.value.RowColumnValueProvider;
-import com.noorq.casser.mapping.value.StatementColumnValuePreparer;
-import com.noorq.casser.mapping.value.ValueProviderMap;
-import com.noorq.casser.support.Fun;
-import com.noorq.casser.support.Fun.Tuple1;
-import com.noorq.casser.support.Fun.Tuple2;
-import com.noorq.casser.support.Fun.Tuple6;
+import com.datastax.driver.core.*;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
-public final class CasserSession extends AbstractSessionOperations implements Closeable {
+import net.helenus.core.operation.*;
+import net.helenus.core.reflect.HelenusPropertyNode;
+import net.helenus.mapping.HelenusEntity;
+import net.helenus.mapping.MappingUtil;
+import net.helenus.mapping.value.*;
+import net.helenus.support.Fun;
+import net.helenus.support.Fun.Tuple1;
+import net.helenus.support.Fun.Tuple2;
+import net.helenus.support.Fun.Tuple6;
+
+public final class HelenusSession extends AbstractSessionOperations implements Closeable {
+
+ private final int MAX_CACHE_SIZE = 10000;
+ private final int MAX_CACHE_EXPIRE_SECONDS = 600;
private final Session session;
+ private final CodecRegistry registry;
private volatile String usingKeyspace;
private volatile boolean showCql;
private final PrintStream printStream;
private final SessionRepository sessionRepository;
private final Executor executor;
private final boolean dropSchemaOnClose;
-
+
private final RowColumnValueProvider valueProvider;
private final StatementColumnValuePreparer valuePreparer;
-
- CasserSession(Session session,
- String usingKeyspace,
- boolean showCql,
- PrintStream printStream,
- SessionRepositoryBuilder sessionRepositoryBuilder,
- Executor executor,
- boolean dropSchemaOnClose) {
+ private final Metadata metadata;
+	private final Cache<String, Object> sessionCache;
+ private UnitOfWork currentUnitOfWork;
+
+ HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
+ PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
+ boolean dropSchemaOnClose) {
this.session = session;
- this.usingKeyspace = Objects.requireNonNull(usingKeyspace, "keyspace needs to be selected before creating session");
+ this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
+ this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
+ "keyspace needs to be selected before creating session");
this.showCql = showCql;
this.printStream = printStream;
this.sessionRepository = sessionRepositoryBuilder.build();
this.executor = executor;
this.dropSchemaOnClose = dropSchemaOnClose;
-
+
this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
+ this.metadata = session.getCluster().getMetadata();
+ this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
+ .expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
+ this.currentUnitOfWork = null;
}
-
+
@Override
public Session currentSession() {
return session;
}
-
+
@Override
public String usingKeyspace() {
return usingKeyspace;
}
-
- public CasserSession useKeyspace(String keyspace) {
+
+ public HelenusSession useKeyspace(String keyspace) {
session.execute(SchemaUtil.use(keyspace, false));
this.usingKeyspace = keyspace;
return this;
}
-
+
@Override
public boolean isShowCql() {
return showCql;
@@ -101,16 +104,16 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
return printStream;
}
- public CasserSession showCql() {
+ public HelenusSession showCql() {
this.showCql = true;
return this;
}
-
- public CasserSession showCql(boolean showCql) {
+
+ public HelenusSession showCql(boolean showCql) {
this.showCql = showCql;
return this;
}
-
+
@Override
public Executor getExecutor() {
return executor;
@@ -125,125 +128,155 @@ public final class CasserSession extends AbstractSessionOperations implements Cl
public ColumnValueProvider getValueProvider() {
return valueProvider;
}
-
+
@Override
public ColumnValuePreparer getValuePreparer() {
return valuePreparer;
}
+ public Metadata getMetadata() { return metadata; }
+
+ public synchronized UnitOfWork begin() {
+ if (currentUnitOfWork == null) {
+ currentUnitOfWork = new UnitOfWork(this);
+ return currentUnitOfWork;
+ } else {
+ return currentUnitOfWork.begin();
+ }
+ }
+
+ public synchronized void commit() throws ConflictingUnitOfWorkException {
+ if (currentUnitOfWork != null) {
+ currentUnitOfWork.commit();
+ currentUnitOfWork = null;
+ }
+ }
+
+ public synchronized void abort() {
+ if (currentUnitOfWork != null) {
+ currentUnitOfWork.abort();
+ currentUnitOfWork = null;
+ }
+ }
+
+ public void cache(String key, Object value) {
+ sessionCache.put(key, value); // ttl
+ }
+
	public <E> SelectOperation<E> select(Class<E> entityClass) {
-
- Objects.requireNonNull(entityClass, "entityClass is empty");
+
+ Objects.requireNonNull(entityClass, "entityClass is empty");
ColumnValueProvider valueProvider = getValueProvider();
- CasserEntity entity = Casser.entity(entityClass);
-
+ HelenusEntity entity = Helenus.entity(entityClass);
+
		return new SelectOperation<E>(this, entity, (r) -> {
-
+
			Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
- return (E) Casser.map(entityClass, map);
-
+ return (E) Helenus.map(entityClass, map);
+
});
}
-
+
public SelectOperation select() {
return new SelectOperation(this);
}
-
+
public SelectOperation selectAll(Class> entityClass) {
Objects.requireNonNull(entityClass, "entityClass is empty");
- return new SelectOperation(this, Casser.entity(entityClass));
+ return new SelectOperation(this, Helenus.entity(entityClass));
}
-
+
	public <E> SelectOperation<E> selectAll(Class<E> entityClass, Function<Row, E> rowMapper) {
Objects.requireNonNull(entityClass, "entityClass is empty");
Objects.requireNonNull(rowMapper, "rowMapper is empty");
- return new SelectOperation(this, Casser.entity(entityClass), rowMapper);
+ return new SelectOperation(this, Helenus.entity(entityClass), rowMapper);
}
-
+
	public <V1> SelectOperation<Fun.Tuple1<V1>> select(Getter<V1> getter1) {
Objects.requireNonNull(getter1, "field 1 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
+
+ HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
		return new SelectOperation<Fun.Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
}
public SelectOperation> select(Getter getter1, Getter getter2) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
- CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
- return new SelectOperation>(this, new Mappers.Mapper2(getValueProvider(), p1, p2), p1, p2);
+
+ HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
+ HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
+ return new SelectOperation>(this, new Mappers.Mapper2(getValueProvider(), p1, p2),
+ p1, p2);
}
- public SelectOperation> select(Getter getter1, Getter getter2, Getter getter3) {
+ public SelectOperation> select(Getter getter1, Getter getter2,
+ Getter getter3) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
- CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
- CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
- return new SelectOperation>(this, new Mappers.Mapper3(getValueProvider(), p1, p2, p3), p1, p2, p3);
+
+ HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
+ HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
+ HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
+ return new SelectOperation>(this,
+ new Mappers.Mapper3(getValueProvider(), p1, p2, p3), p1, p2, p3);
}
- public SelectOperation> select(
- Getter getter1, Getter getter2, Getter getter3, Getter getter4) {
+ public SelectOperation> select(Getter getter1, Getter getter2,
+ Getter getter3, Getter getter4) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
- CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
- CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
- CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
- return new SelectOperation>(this, new Mappers.Mapper4(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
+
+ HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
+ HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
+ HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
+ HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
+ return new SelectOperation>(this,
+ new Mappers.Mapper4(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
}
- public SelectOperation> select(
- Getter getter1, Getter getter2, Getter getter3, Getter getter4, Getter getter5) {
+ public SelectOperation> select(Getter getter1,
+ Getter getter2, Getter getter3, Getter getter4, Getter getter5) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
Objects.requireNonNull(getter5, "field 5 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
- CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
- CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
- CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
- CasserPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
- return new SelectOperation>(this,
- new Mappers.Mapper5(getValueProvider(), p1, p2, p3, p4, p5),
- p1, p2, p3, p4, p5);
+
+ HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
+ HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
+ HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
+ HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
+ HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
+ return new SelectOperation>(this,
+ new Mappers.Mapper5(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
}
-
- public SelectOperation> select(
- Getter getter1, Getter getter2, Getter getter3,
- Getter getter4, Getter getter5, Getter getter6) {
+
+ public SelectOperation> select(Getter getter1,
+ Getter getter2, Getter getter3, Getter getter4, Getter getter5, Getter getter6) {
Objects.requireNonNull(getter1, "field 1 is empty");
Objects.requireNonNull(getter2, "field 2 is empty");
Objects.requireNonNull(getter3, "field 3 is empty");
Objects.requireNonNull(getter4, "field 4 is empty");
Objects.requireNonNull(getter5, "field 5 is empty");
Objects.requireNonNull(getter6, "field 6 is empty");
-
- CasserPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
- CasserPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
- CasserPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
- CasserPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
- CasserPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
- CasserPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
- return new SelectOperation